pull-requests: make the renderer stored and saved for each pull request.
Author: marcink
Revision: r2903:6d16d1cd (default branch)

The requested changes are too big, so the diff below was truncated by the viewer.
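In short, the commit adds a description_renderer column to the pull request and pull request version tables, bumps the database schema version, and updates the pull-request tests to submit the new form field. The intent is that the markup renderer in effect when a description is written gets stored with the pull request and reused when the description is displayed. A rough sketch of that read path, with hypothetical names (render_description and DEFAULT_RENDERER are illustrative, not part of this commit; it assumes a helpers.render(text, renderer) utility like the one RhodeCode templates use):

from rhodecode.lib import helpers as h

DEFAULT_RENDERER = 'rst'  # illustrative fallback for rows where the column is NULL

def render_description(pull_request):
    # Prefer the renderer stored with the pull request; fall back to a default
    # for pull requests created before this migration ran.
    renderer = pull_request.description_renderer or DEFAULT_RENDERER
    return h.render(pull_request.description, renderer)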

@@ -0,0 +1,40 b''
import logging

from sqlalchemy import *

from rhodecode.model import meta
from rhodecode.lib.dbmigrate.versions import _reset_base, notify

log = logging.getLogger(__name__)


def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_11_0_0 as db

    pull_request_table = db.PullRequest.__table__
    pull_request_version_table = db.PullRequestVersion.__table__

    renderer = Column('description_renderer', Unicode(64), nullable=True)
    renderer.create(table=pull_request_table)

    renderer_ver = Column('description_renderer', Unicode(64), nullable=True)
    renderer_ver.create(table=pull_request_version_table)

    # issue fixups
    fixups(db, meta.Session)


def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine


def fixups(models, _SESSION):
    pass
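The downgrade() above is intentionally a no-op, as is common for these migration scripts. Purely for illustration, a symmetric downgrade could drop the two columns again via the same sqlalchemy-migrate changeset API (Column.drop mirrors the Column.create calls in upgrade()); this sketch is not part of the commit:

def downgrade_example(migrate_engine):
    # Hypothetical mirror of upgrade(): remove the columns added above.
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_11_0_0 as db

    for table in (db.PullRequest.__table__, db.PullRequestVersion.__table__):
        # Only drop the column if the schema module actually defines it.
        if 'description_renderer' in table.columns:
            table.columns['description_renderer'].drop(table=table)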
@@ -1,63 +1,63 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2018 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""

RhodeCode, a web based repository management software
versioning implementation: http://www.python.org/dev/peps/pep-0386/
"""

import os
import sys
import platform

VERSION = tuple(open(os.path.join(
    os.path.dirname(__file__), 'VERSION')).read().split('.'))

BACKENDS = {
    'hg': 'Mercurial repository',
    'git': 'Git repository',
    'svn': 'Subversion repository',
}

CELERY_ENABLED = False
CELERY_EAGER = False

# link to config for pyramid
CONFIG = {}

# Populated with the settings dictionary from application init in
# rhodecode.conf.environment.load_pyramid_environment
PYRAMID_SETTINGS = {}

# Linked module for extensions
EXTENSIONS = {}

__version__ = ('.'.join((str(each) for each in VERSION[:3])))
-__dbversion__ = 86  # defines current db version for migrations
+__dbversion__ = 87  # defines current db version for migrations
__platform__ = platform.system()
__license__ = 'AGPLv3, and Commercial License'
__author__ = 'RhodeCode GmbH'
__url__ = 'https://code.rhodecode.com'

is_windows = __platform__ in ['Windows']
is_unix = not is_windows
is_test = False
disable_error_handler = False
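The only functional change in this file is the __dbversion__ bump from 86 to 87, which is what tells the upgrade machinery that a new schema revision (the description_renderer columns above) still has to be applied. As a loose, hypothetical illustration of how such a gate is typically evaluated (not RhodeCode's actual upgrade code):

import rhodecode

def pending_schema_versions(version_in_database):
    # Hypothetical sketch: schema revisions that still have to run,
    # assuming one migration script per version number.
    return list(range(version_in_database + 1, rhodecode.__dbversion__ + 1))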
@@ -1,1203 +1,1206 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2018 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
import mock
import pytest

import rhodecode
from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib import helpers as h
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.db import (
    PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
from rhodecode.model.meta import Session
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.user import UserModel
from rhodecode.tests import (
    assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
from rhodecode.tests.utils import AssertResponse


def route_path(name, params=None, **kwargs):
    import urllib

    base_url = {
        'repo_changelog': '/{repo_name}/changelog',
        'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
        'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
        'pullrequest_show_all': '/{repo_name}/pull-request',
        'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
        'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
        'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
        'pullrequest_new': '/{repo_name}/pull-request/new',
        'pullrequest_create': '/{repo_name}/pull-request/create',
        'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
        'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
        'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
        'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
        'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
    return base_url


@pytest.mark.usefixtures('app', 'autologin_user')
@pytest.mark.backends("git", "hg")
class TestPullrequestsView(object):

    def test_index(self, backend):
        self.app.get(route_path(
            'pullrequest_new',
            repo_name=backend.repo_name))

    def test_option_menu_create_pull_request_exists(self, backend):
        repo_name = backend.repo_name
        response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))

        create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
            'pullrequest_new', repo_name=repo_name)
        response.mustcontain(create_pr_link)

    def test_create_pr_form_with_raw_commit_id(self, backend):
        repo = backend.repo

        self.app.get(
            route_path('pullrequest_new',
                       repo_name=repo.repo_name,
                       commit=repo.get_commit().raw_id),
            status=200)

    @pytest.mark.parametrize('pr_merge_enabled', [True, False])
    def test_show(self, pr_util, pr_merge_enabled):
        pull_request = pr_util.create_pull_request(
            mergeable=pr_merge_enabled, enable_notifications=False)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        for commit_id in pull_request.revisions:
            response.mustcontain(commit_id)

        assert pull_request.target_ref_parts.type in response
        assert pull_request.target_ref_parts.name in response
        target_clone_url = pull_request.target_repo.clone_url()
        assert target_clone_url in response

        assert 'class="pull-request-merge"' in response
        assert (
            'Server-side pull request merging is disabled.'
            in response) != pr_merge_enabled

    def test_close_status_visibility(self, pr_util, user_util, csrf_token):
        # Logout
        response = self.app.post(
            h.route_path('logout'),
            params={'csrf_token': csrf_token})
        # Login as regular user
        response = self.app.post(h.route_path('login'),
                                 {'username': TEST_USER_REGULAR_LOGIN,
                                  'password': 'test12'})

        pull_request = pr_util.create_pull_request(
            author=TEST_USER_REGULAR_LOGIN)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('Server-side pull request merging is disabled.')

        assert_response = response.assert_response()
        # for regular user without a merge permissions, we don't see it
        assert_response.no_element_exists('#close-pull-request-action')

        user_util.grant_user_permission_to_repo(
            pull_request.target_repo,
            UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
            'repository.write')
        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('Server-side pull request merging is disabled.')

        assert_response = response.assert_response()
        # now regular user has a merge permissions, we have CLOSE button
        assert_response.one_element_exists('#close-pull-request-action')

    def test_show_invalid_commit_id(self, pr_util):
        # Simulating invalid revisions which will cause a lookup error
        pull_request = pr_util.create_pull_request()
        pull_request.revisions = ['invalid']
        Session().add(pull_request)
        Session().commit()

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        for commit_id in pull_request.revisions:
            response.mustcontain(commit_id)

    def test_show_invalid_source_reference(self, pr_util):
        pull_request = pr_util.create_pull_request()
        pull_request.source_ref = 'branch:b:invalid'
        Session().add(pull_request)
        Session().commit()

        self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

    def test_edit_title_description(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id

        response = self.app.post(
            route_path('pullrequest_update',
                       repo_name=pull_request.target_repo.repo_name,
                       pull_request_id=pull_request_id),
            params={
                'edit_pull_request': 'true',
                'title': 'New title',
                'description': 'New description',
                'csrf_token': csrf_token})

        assert_session_flash(
            response, u'Pull request title & description updated.',
            category='success')

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.title == 'New title'
        assert pull_request.description == 'New description'

    def test_edit_title_description_closed(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.repo_name
        pr_util.close()

        response = self.app.post(
            route_path('pullrequest_update',
                       repo_name=repo_name, pull_request_id=pull_request_id),
            params={
                'edit_pull_request': 'true',
                'title': 'New title',
                'description': 'New description',
                'csrf_token': csrf_token}, status=200)
        assert_session_flash(
            response, u'Cannot update closed pull requests.',
            category='error')

    def test_update_invalid_source_reference(self, pr_util, csrf_token):
        from rhodecode.lib.vcs.backends.base import UpdateFailureReason

        pull_request = pr_util.create_pull_request()
        pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
        Session().add(pull_request)
        Session().commit()

        pull_request_id = pull_request.pull_request_id

        response = self.app.post(
            route_path('pullrequest_update',
                       repo_name=pull_request.target_repo.repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true',
                    'csrf_token': csrf_token})

        expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
            UpdateFailureReason.MISSING_SOURCE_REF])
        assert_session_flash(response, expected_msg, category='error')

    def test_missing_target_reference(self, pr_util, csrf_token):
        from rhodecode.lib.vcs.backends.base import MergeFailureReason
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
        Session().add(pull_request)
        Session().commit()

        pull_request_id = pull_request.pull_request_id
        pull_request_url = route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.repo_name,
            pull_request_id=pull_request_id)

        response = self.app.get(pull_request_url)

        assertr = AssertResponse(response)
        expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
            MergeFailureReason.MISSING_TARGET_REF]
        assertr.element_contains(
            'span[data-role="merge-message"]', str(expected_msg))

    def test_comment_and_close_pull_request_custom_message_approved(
            self, pr_util, csrf_token, xhr_header):

        pull_request = pr_util.create_pull_request(approved=True)
        pull_request_id = pull_request.pull_request_id
        author = pull_request.user_id
        repo = pull_request.target_repo.repo_id

        self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request_id),
            params={
                'close_pull_request': '1',
                'text': 'Closing a PR',
                'csrf_token': csrf_token},
            extra_environ=xhr_header,)

        journal = UserLog.query()\
            .filter(UserLog.user_id == author)\
            .filter(UserLog.repository_id == repo) \
            .order_by('user_log_id') \
            .all()
        assert journal[-1].action == 'repo.pull_request.close'

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.is_closed()

        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_APPROVED
        comments = ChangesetComment().query() \
            .filter(ChangesetComment.pull_request == pull_request) \
            .order_by(ChangesetComment.comment_id.asc())\
            .all()
        assert comments[-1].text == 'Closing a PR'

    def test_comment_force_close_pull_request_rejected(
            self, pr_util, csrf_token, xhr_header):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id
        PullRequestModel().update_reviewers(
            pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
            pull_request.author)
        author = pull_request.user_id
        repo = pull_request.target_repo.repo_id

        self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request_id),
            params={
                'close_pull_request': '1',
                'csrf_token': csrf_token},
            extra_environ=xhr_header)

        pull_request = PullRequest.get(pull_request_id)

        journal = UserLog.query()\
            .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
            .order_by('user_log_id') \
            .all()
        assert journal[-1].action == 'repo.pull_request.close'

        # check only the latest status, not the review status
        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_REJECTED

    def test_comment_and_close_pull_request(
            self, pr_util, csrf_token, xhr_header):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id

        response = self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request.pull_request_id),
            params={
                'close_pull_request': 'true',
                'csrf_token': csrf_token},
            extra_environ=xhr_header)

        assert response.json

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.is_closed()

        # check only the latest status, not the review status
        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_REJECTED

    def test_create_pull_request(self, backend, csrf_token):
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change2'])

        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change2']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor']),
                ('common_ancestor', commit_ids['ancestor']),
-                ('pullrequest_desc', 'Description'),
-                ('pullrequest_title', 'Title'),
+                ('pullrequest_title', 'Title'),
+                ('pullrequest_desc', 'Description'),
+                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('revisions', commit_ids['change2']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']
        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # check that we have now both revisions
        assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
        assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref

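(Note on the change just above: the create-pull-request form data now carries a description_renderer field next to the title and description; the same pattern repeats in the two tests that follow.)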
    def test_reviewer_notifications(self, backend, csrf_token):
        # We have to use the app.post for this test so it will create the
        # notifications properly with the new PR
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                 FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
-                ('pullrequest_desc', 'Description'),
-                ('pullrequest_title', 'Title'),
+                ('pullrequest_title', 'Title'),
+                ('pullrequest_desc', 'Description'),
+                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '2'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # Check that a notification was made
        notifications = Notification.query()\
            .filter(Notification.created_by == pull_request.author.user_id,
                    Notification.type_ == Notification.TYPE_PULL_REQUEST,
                    Notification.subject.contains(
                        "wants you to review pull request #%s" % pull_request_id))
        assert len(notifications.all()) == 1

        # Change reviewers and check that a notification was made
        PullRequestModel().update_reviewers(
            pull_request.pull_request_id, [(1, [], False, [])],
            pull_request.author)
        assert len(notifications.all()) == 2

    def test_create_pull_request_stores_ancestor_commit_id(self, backend,
                                                           csrf_token):
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                 FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
-                ('pullrequest_desc', 'Description'),
-                ('pullrequest_title', 'Title'),
+                ('pullrequest_title', 'Title'),
+                ('pullrequest_desc', 'Description'),
+                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # target_ref has to point to the ancestor's commit_id in order to
        # show the correct diff
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref

        # Check generated diff contents
        response = response.follow()
        assert 'content_of_ancestor' not in response.body
        assert 'content_of_ancestor-child' not in response.body
        assert 'content_of_change' in response.body

    def test_merge_pull_request_enabled(self, pr_util, csrf_token):
        # Clear any previous calls to rcextensions
        rhodecode.EXTENSIONS.calls.clear()

        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name,

        response = self.app.post(
            route_path('pullrequest_merge',
                       repo_name=str(repo_name[0]),
                       pull_request_id=pull_request_id),
            params={'csrf_token': csrf_token}).follow()

        pull_request = PullRequest.get(pull_request_id)

        assert response.status_int == 200
        assert pull_request.is_closed()
        assert_pull_request_status(
            pull_request, ChangesetStatus.STATUS_APPROVED)

        # Check the relevant log entries were added
        user_logs = UserLog.query().order_by('-user_log_id').limit(3)
        actions = [log.action for log in user_logs]
        pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        expected_actions = [
            u'repo.pull_request.close',
            u'repo.pull_request.merge',
            u'repo.pull_request.comment.create'
        ]
        assert actions == expected_actions

        user_logs = UserLog.query().order_by('-user_log_id').limit(4)
        actions = [log for log in user_logs]
        assert actions[-1].action == 'user.push'
        assert actions[-1].action_data['commit_ids'] == pr_commit_ids

        # Check post_push rcextension was really executed
        push_calls = rhodecode.EXTENSIONS.calls['post_push']
        assert len(push_calls) == 1
        unused_last_call_args, last_call_kwargs = push_calls[0]
        assert last_call_kwargs['action'] == 'push'
        assert last_call_kwargs['pushed_revs'] == pr_commit_ids

    def test_merge_pull_request_disabled(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request(mergeable=False)
        pull_request_id = pull_request.pull_request_id
        pull_request = PullRequest.get(pull_request_id)

        response = self.app.post(
            route_path('pullrequest_merge',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request.pull_request_id),
            params={'csrf_token': csrf_token}).follow()

        assert response.status_int == 200
        response.mustcontain(
            'Merge is not currently possible because of below failed checks.')
        response.mustcontain('Server-side pull request merging is disabled.')

    @pytest.mark.skip_backends('svn')
    def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request(mergeable=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name

        response = self.app.post(
            route_path('pullrequest_merge',
                       repo_name=repo_name,
                       pull_request_id=pull_request_id),
            params={'csrf_token': csrf_token}).follow()

        assert response.status_int == 200

        response.mustcontain(
            'Merge is not currently possible because of below failed checks.')
        response.mustcontain('Pull request reviewer approval is pending.')

    def test_merge_pull_request_renders_failure_reason(
            self, user_regular, csrf_token, pr_util):
        pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name

        model_patcher = mock.patch.multiple(
            PullRequestModel,
            merge_repo=mock.Mock(return_value=MergeResponse(
                True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
            merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))

        with model_patcher:
            response = self.app.post(
                route_path('pullrequest_merge',
                           repo_name=repo_name,
                           pull_request_id=pull_request_id),
                params={'csrf_token': csrf_token}, status=302)

        assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
            MergeFailureReason.PUSH_FAILED])

    def test_update_source_revision(self, backend, csrf_token):
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()
        pull_request.source_repo = source
        # TODO: johbo: Make sure that we write the source ref this way!
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])
        pull_request.target_repo = target

        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['ancestor'])
        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(
            TEST_USER_ADMIN_LOGIN)
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # source has ancestor - change - change-2
        backend.pull_heads(source, heads=['change-2'])

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target.repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true',
                    'csrf_token': csrf_token})

        # check that we have now both revisions
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [
            commit_ids['change-2'], commit_ids['change']]

        # TODO: johbo: this should be a test on its own
        response = self.app.get(route_path(
            'pullrequest_new',
            repo_name=target.repo_name))
        assert response.status_int == 200
        assert 'Pull request updated to' in response.body
        assert 'with 1 added, 0 removed commits.' in response.body

    def test_update_target_revision(self, backend, csrf_token):
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'ancestor-new', 'parents': ['ancestor']},
            {'message': 'change-rebased'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()
        pull_request.source_repo = source
        # TODO: johbo: Make sure that we write the source ref this way!
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])
        pull_request.target_repo = target
        # TODO: johbo: Target ref should be branch based, since tip can jump
        # from branch to branch
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['ancestor'])
        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(
            TEST_USER_ADMIN_LOGIN)
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # target has ancestor - ancestor-new
724 # target has ancestor - ancestor-new
722 # source has ancestor - ancestor-new - change-rebased
725 # source has ancestor - ancestor-new - change-rebased
723 backend.pull_heads(target, heads=['ancestor-new'])
726 backend.pull_heads(target, heads=['ancestor-new'])
724 backend.pull_heads(source, heads=['change-rebased'])
727 backend.pull_heads(source, heads=['change-rebased'])
725
728
726 # update PR
729 # update PR
727 self.app.post(
730 self.app.post(
728 route_path('pullrequest_update',
731 route_path('pullrequest_update',
729 repo_name=target.repo_name,
732 repo_name=target.repo_name,
730 pull_request_id=pull_request_id),
733 pull_request_id=pull_request_id),
731 params={'update_commits': 'true',
734 params={'update_commits': 'true',
732 'csrf_token': csrf_token},
735 'csrf_token': csrf_token},
733 status=200)
736 status=200)
734
737
735 # check that the revisions now point at the rebased commit
738 # check that the revisions now point at the rebased commit
736 pull_request = PullRequest.get(pull_request_id)
739 pull_request = PullRequest.get(pull_request_id)
737 assert pull_request.revisions == [commit_ids['change-rebased']]
740 assert pull_request.revisions == [commit_ids['change-rebased']]
738 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
741 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
739 branch=backend.default_branch_name,
742 branch=backend.default_branch_name,
740 commit_id=commit_ids['ancestor-new'])
743 commit_id=commit_ids['ancestor-new'])
741
744
742 # TODO: johbo: This should be a test on its own
745 # TODO: johbo: This should be a test on its own
743 response = self.app.get(route_path(
746 response = self.app.get(route_path(
744 'pullrequest_new',
747 'pullrequest_new',
745 repo_name=target.repo_name))
748 repo_name=target.repo_name))
746 assert response.status_int == 200
749 assert response.status_int == 200
747 assert 'Pull request updated to' in response.body
750 assert 'Pull request updated to' in response.body
748 assert 'with 1 added, 1 removed commits.' in response.body
751 assert 'with 1 added, 1 removed commits.' in response.body
749
752
750 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
753 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
751 backend = backend_git
754 backend = backend_git
752 commits = [
755 commits = [
753 {'message': 'master-commit-1'},
756 {'message': 'master-commit-1'},
754 {'message': 'master-commit-2-change-1'},
757 {'message': 'master-commit-2-change-1'},
755 {'message': 'master-commit-3-change-2'},
758 {'message': 'master-commit-3-change-2'},
756
759
757 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
760 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
758 {'message': 'feat-commit-2'},
761 {'message': 'feat-commit-2'},
759 ]
762 ]
760 commit_ids = backend.create_master_repo(commits)
763 commit_ids = backend.create_master_repo(commits)
761 target = backend.create_repo(heads=['master-commit-3-change-2'])
764 target = backend.create_repo(heads=['master-commit-3-change-2'])
762 source = backend.create_repo(heads=['feat-commit-2'])
765 source = backend.create_repo(heads=['feat-commit-2'])
763
766
764 # create pr from a in source to A in target
767 # create pr from a in source to A in target
765 pull_request = PullRequest()
768 pull_request = PullRequest()
766 pull_request.source_repo = source
769 pull_request.source_repo = source
767 # TODO: johbo: Make sure that we write the source ref this way!
770 # TODO: johbo: Make sure that we write the source ref this way!
768 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
771 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
769 branch=backend.default_branch_name,
772 branch=backend.default_branch_name,
770 commit_id=commit_ids['master-commit-3-change-2'])
773 commit_id=commit_ids['master-commit-3-change-2'])
771
774
772 pull_request.target_repo = target
775 pull_request.target_repo = target
773 # TODO: johbo: Target ref should be branch based, since tip can jump
776 # TODO: johbo: Target ref should be branch based, since tip can jump
774 # from branch to branch
777 # from branch to branch
775 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
778 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
776 branch=backend.default_branch_name,
779 branch=backend.default_branch_name,
777 commit_id=commit_ids['feat-commit-2'])
780 commit_id=commit_ids['feat-commit-2'])
778
781
779 pull_request.revisions = [
782 pull_request.revisions = [
780 commit_ids['feat-commit-1'],
783 commit_ids['feat-commit-1'],
781 commit_ids['feat-commit-2']
784 commit_ids['feat-commit-2']
782 ]
785 ]
783 pull_request.title = u"Test"
786 pull_request.title = u"Test"
784 pull_request.description = u"Description"
787 pull_request.description = u"Description"
785 pull_request.author = UserModel().get_by_username(
788 pull_request.author = UserModel().get_by_username(
786 TEST_USER_ADMIN_LOGIN)
789 TEST_USER_ADMIN_LOGIN)
787 Session().add(pull_request)
790 Session().add(pull_request)
788 Session().commit()
791 Session().commit()
789 pull_request_id = pull_request.pull_request_id
792 pull_request_id = pull_request.pull_request_id
790
793
791 # PR is created, now we simulate a force-push into target,
794 # PR is created, now we simulate a force-push into target,
792 # that drops the last 2 commits
795 # that drops the last 2 commits
793 vcsrepo = target.scm_instance()
796 vcsrepo = target.scm_instance()
794 vcsrepo.config.clear_section('hooks')
797 vcsrepo.config.clear_section('hooks')
795 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
798 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
796
799
797 # update PR
800 # update PR
798 self.app.post(
801 self.app.post(
799 route_path('pullrequest_update',
802 route_path('pullrequest_update',
800 repo_name=target.repo_name,
803 repo_name=target.repo_name,
801 pull_request_id=pull_request_id),
804 pull_request_id=pull_request_id),
802 params={'update_commits': 'true',
805 params={'update_commits': 'true',
803 'csrf_token': csrf_token},
806 'csrf_token': csrf_token},
804 status=200)
807 status=200)
805
808
806 response = self.app.get(route_path(
809 response = self.app.get(route_path(
807 'pullrequest_new',
810 'pullrequest_new',
808 repo_name=target.repo_name))
811 repo_name=target.repo_name))
809 assert response.status_int == 200
812 assert response.status_int == 200
810 response.mustcontain('Pull request updated to')
813 response.mustcontain('Pull request updated to')
811 response.mustcontain('with 0 added, 0 removed commits.')
814 response.mustcontain('with 0 added, 0 removed commits.')
812
815
813 def test_update_of_ancestor_reference(self, backend, csrf_token):
816 def test_update_of_ancestor_reference(self, backend, csrf_token):
814 commits = [
817 commits = [
815 {'message': 'ancestor'},
818 {'message': 'ancestor'},
816 {'message': 'change'},
819 {'message': 'change'},
817 {'message': 'change-2'},
820 {'message': 'change-2'},
818 {'message': 'ancestor-new', 'parents': ['ancestor']},
821 {'message': 'ancestor-new', 'parents': ['ancestor']},
819 {'message': 'change-rebased'},
822 {'message': 'change-rebased'},
820 ]
823 ]
821 commit_ids = backend.create_master_repo(commits)
824 commit_ids = backend.create_master_repo(commits)
822 target = backend.create_repo(heads=['ancestor'])
825 target = backend.create_repo(heads=['ancestor'])
823 source = backend.create_repo(heads=['change'])
826 source = backend.create_repo(heads=['change'])
824
827
825 # create pr from a in source to A in target
828 # create pr from a in source to A in target
826 pull_request = PullRequest()
829 pull_request = PullRequest()
827 pull_request.source_repo = source
830 pull_request.source_repo = source
828 # TODO: johbo: Make sure that we write the source ref this way!
831 # TODO: johbo: Make sure that we write the source ref this way!
829 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
832 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
830 branch=backend.default_branch_name,
833 branch=backend.default_branch_name,
831 commit_id=commit_ids['change'])
834 commit_id=commit_ids['change'])
832 pull_request.target_repo = target
835 pull_request.target_repo = target
833 # TODO: johbo: Target ref should be branch based, since tip can jump
836 # TODO: johbo: Target ref should be branch based, since tip can jump
834 # from branch to branch
837 # from branch to branch
835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
838 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
836 branch=backend.default_branch_name,
839 branch=backend.default_branch_name,
837 commit_id=commit_ids['ancestor'])
840 commit_id=commit_ids['ancestor'])
838 pull_request.revisions = [commit_ids['change']]
841 pull_request.revisions = [commit_ids['change']]
839 pull_request.title = u"Test"
842 pull_request.title = u"Test"
840 pull_request.description = u"Description"
843 pull_request.description = u"Description"
841 pull_request.author = UserModel().get_by_username(
844 pull_request.author = UserModel().get_by_username(
842 TEST_USER_ADMIN_LOGIN)
845 TEST_USER_ADMIN_LOGIN)
843 Session().add(pull_request)
846 Session().add(pull_request)
844 Session().commit()
847 Session().commit()
845 pull_request_id = pull_request.pull_request_id
848 pull_request_id = pull_request.pull_request_id
846
849
847 # target has ancestor - ancestor-new
850 # target has ancestor - ancestor-new
848 # source has ancestor - ancestor-new - change-rebased
851 # source has ancestor - ancestor-new - change-rebased
849 backend.pull_heads(target, heads=['ancestor-new'])
852 backend.pull_heads(target, heads=['ancestor-new'])
850 backend.pull_heads(source, heads=['change-rebased'])
853 backend.pull_heads(source, heads=['change-rebased'])
851
854
852 # update PR
855 # update PR
853 self.app.post(
856 self.app.post(
854 route_path('pullrequest_update',
857 route_path('pullrequest_update',
855 repo_name=target.repo_name,
858 repo_name=target.repo_name,
856 pull_request_id=pull_request_id),
859 pull_request_id=pull_request_id),
857 params={'update_commits': 'true',
860 params={'update_commits': 'true',
858 'csrf_token': csrf_token},
861 'csrf_token': csrf_token},
859 status=200)
862 status=200)
860
863
861 # Expect the target reference to be updated correctly
864 # Expect the target reference to be updated correctly
862 pull_request = PullRequest.get(pull_request_id)
865 pull_request = PullRequest.get(pull_request_id)
863 assert pull_request.revisions == [commit_ids['change-rebased']]
866 assert pull_request.revisions == [commit_ids['change-rebased']]
864 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
867 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
865 branch=backend.default_branch_name,
868 branch=backend.default_branch_name,
866 commit_id=commit_ids['ancestor-new'])
869 commit_id=commit_ids['ancestor-new'])
867 assert pull_request.target_ref == expected_target_ref
870 assert pull_request.target_ref == expected_target_ref
868
871
869 def test_remove_pull_request_branch(self, backend_git, csrf_token):
872 def test_remove_pull_request_branch(self, backend_git, csrf_token):
870 branch_name = 'development'
873 branch_name = 'development'
871 commits = [
874 commits = [
872 {'message': 'initial-commit'},
875 {'message': 'initial-commit'},
873 {'message': 'old-feature'},
876 {'message': 'old-feature'},
874 {'message': 'new-feature', 'branch': branch_name},
877 {'message': 'new-feature', 'branch': branch_name},
875 ]
878 ]
876 repo = backend_git.create_repo(commits)
879 repo = backend_git.create_repo(commits)
877 commit_ids = backend_git.commit_ids
880 commit_ids = backend_git.commit_ids
878
881
879 pull_request = PullRequest()
882 pull_request = PullRequest()
880 pull_request.source_repo = repo
883 pull_request.source_repo = repo
881 pull_request.target_repo = repo
884 pull_request.target_repo = repo
882 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
885 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
883 branch=branch_name, commit_id=commit_ids['new-feature'])
886 branch=branch_name, commit_id=commit_ids['new-feature'])
884 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
887 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
885 branch=backend_git.default_branch_name,
888 branch=backend_git.default_branch_name,
886 commit_id=commit_ids['old-feature'])
889 commit_id=commit_ids['old-feature'])
887 pull_request.revisions = [commit_ids['new-feature']]
890 pull_request.revisions = [commit_ids['new-feature']]
888 pull_request.title = u"Test"
891 pull_request.title = u"Test"
889 pull_request.description = u"Description"
892 pull_request.description = u"Description"
890 pull_request.author = UserModel().get_by_username(
893 pull_request.author = UserModel().get_by_username(
891 TEST_USER_ADMIN_LOGIN)
894 TEST_USER_ADMIN_LOGIN)
892 Session().add(pull_request)
895 Session().add(pull_request)
893 Session().commit()
896 Session().commit()
894
897
895 vcs = repo.scm_instance()
898 vcs = repo.scm_instance()
896 vcs.remove_ref('refs/heads/{}'.format(branch_name))
899 vcs.remove_ref('refs/heads/{}'.format(branch_name))
897
900
898 response = self.app.get(route_path(
901 response = self.app.get(route_path(
899 'pullrequest_show',
902 'pullrequest_show',
900 repo_name=repo.repo_name,
903 repo_name=repo.repo_name,
901 pull_request_id=pull_request.pull_request_id))
904 pull_request_id=pull_request.pull_request_id))
902
905
903 assert response.status_int == 200
906 assert response.status_int == 200
904 assert_response = AssertResponse(response)
907 assert_response = AssertResponse(response)
905 assert_response.element_contains(
908 assert_response.element_contains(
906 '#changeset_compare_view_content .alert strong',
909 '#changeset_compare_view_content .alert strong',
907 'Missing commits')
910 'Missing commits')
908 assert_response.element_contains(
911 assert_response.element_contains(
909 '#changeset_compare_view_content .alert',
912 '#changeset_compare_view_content .alert',
910 'This pull request cannot be displayed, because one or more'
913 'This pull request cannot be displayed, because one or more'
911 ' commits no longer exist in the source repository.')
914 ' commits no longer exist in the source repository.')
912
915
913 def test_strip_commits_from_pull_request(
916 def test_strip_commits_from_pull_request(
914 self, backend, pr_util, csrf_token):
917 self, backend, pr_util, csrf_token):
915 commits = [
918 commits = [
916 {'message': 'initial-commit'},
919 {'message': 'initial-commit'},
917 {'message': 'old-feature'},
920 {'message': 'old-feature'},
918 {'message': 'new-feature', 'parents': ['initial-commit']},
921 {'message': 'new-feature', 'parents': ['initial-commit']},
919 ]
922 ]
920 pull_request = pr_util.create_pull_request(
923 pull_request = pr_util.create_pull_request(
921 commits, target_head='initial-commit', source_head='new-feature',
924 commits, target_head='initial-commit', source_head='new-feature',
922 revisions=['new-feature'])
925 revisions=['new-feature'])
923
926
924 vcs = pr_util.source_repository.scm_instance()
927 vcs = pr_util.source_repository.scm_instance()
925 if backend.alias == 'git':
928 if backend.alias == 'git':
926 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
929 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
927 else:
930 else:
928 vcs.strip(pr_util.commit_ids['new-feature'])
931 vcs.strip(pr_util.commit_ids['new-feature'])
929
932
930 response = self.app.get(route_path(
933 response = self.app.get(route_path(
931 'pullrequest_show',
934 'pullrequest_show',
932 repo_name=pr_util.target_repository.repo_name,
935 repo_name=pr_util.target_repository.repo_name,
933 pull_request_id=pull_request.pull_request_id))
936 pull_request_id=pull_request.pull_request_id))
934
937
935 assert response.status_int == 200
938 assert response.status_int == 200
936 assert_response = AssertResponse(response)
939 assert_response = AssertResponse(response)
937 assert_response.element_contains(
940 assert_response.element_contains(
938 '#changeset_compare_view_content .alert strong',
941 '#changeset_compare_view_content .alert strong',
939 'Missing commits')
942 'Missing commits')
940 assert_response.element_contains(
943 assert_response.element_contains(
941 '#changeset_compare_view_content .alert',
944 '#changeset_compare_view_content .alert',
942 'This pull request cannot be displayed, because one or more'
945 'This pull request cannot be displayed, because one or more'
943 ' commits no longer exist in the source repository.')
946 ' commits no longer exist in the source repository.')
944 assert_response.element_contains(
947 assert_response.element_contains(
945 '#update_commits',
948 '#update_commits',
946 'Update commits')
949 'Update commits')
947
950
948 def test_strip_commits_and_update(
951 def test_strip_commits_and_update(
949 self, backend, pr_util, csrf_token):
952 self, backend, pr_util, csrf_token):
950 commits = [
953 commits = [
951 {'message': 'initial-commit'},
954 {'message': 'initial-commit'},
952 {'message': 'old-feature'},
955 {'message': 'old-feature'},
953 {'message': 'new-feature', 'parents': ['old-feature']},
956 {'message': 'new-feature', 'parents': ['old-feature']},
954 ]
957 ]
955 pull_request = pr_util.create_pull_request(
958 pull_request = pr_util.create_pull_request(
956 commits, target_head='old-feature', source_head='new-feature',
959 commits, target_head='old-feature', source_head='new-feature',
957 revisions=['new-feature'], mergeable=True)
960 revisions=['new-feature'], mergeable=True)
958
961
959 vcs = pr_util.source_repository.scm_instance()
962 vcs = pr_util.source_repository.scm_instance()
960 if backend.alias == 'git':
963 if backend.alias == 'git':
961 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
964 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
962 else:
965 else:
963 vcs.strip(pr_util.commit_ids['new-feature'])
966 vcs.strip(pr_util.commit_ids['new-feature'])
964
967
965 response = self.app.post(
968 response = self.app.post(
966 route_path('pullrequest_update',
969 route_path('pullrequest_update',
967 repo_name=pull_request.target_repo.repo_name,
970 repo_name=pull_request.target_repo.repo_name,
968 pull_request_id=pull_request.pull_request_id),
971 pull_request_id=pull_request.pull_request_id),
969 params={'update_commits': 'true',
972 params={'update_commits': 'true',
970 'csrf_token': csrf_token})
973 'csrf_token': csrf_token})
971
974
972 assert response.status_int == 200
975 assert response.status_int == 200
973 assert response.body == 'true'
976 assert response.body == 'true'
974
977
975 # Make sure that after update, it won't raise 500 errors
978 # Make sure that after update, it won't raise 500 errors
976 response = self.app.get(route_path(
979 response = self.app.get(route_path(
977 'pullrequest_show',
980 'pullrequest_show',
978 repo_name=pr_util.target_repository.repo_name,
981 repo_name=pr_util.target_repository.repo_name,
979 pull_request_id=pull_request.pull_request_id))
982 pull_request_id=pull_request.pull_request_id))
980
983
981 assert response.status_int == 200
984 assert response.status_int == 200
982 assert_response = AssertResponse(response)
985 assert_response = AssertResponse(response)
983 assert_response.element_contains(
986 assert_response.element_contains(
984 '#changeset_compare_view_content .alert strong',
987 '#changeset_compare_view_content .alert strong',
985 'Missing commits')
988 'Missing commits')
986
989
987 def test_branch_is_a_link(self, pr_util):
990 def test_branch_is_a_link(self, pr_util):
988 pull_request = pr_util.create_pull_request()
991 pull_request = pr_util.create_pull_request()
989 pull_request.source_ref = 'branch:origin:1234567890abcdef'
992 pull_request.source_ref = 'branch:origin:1234567890abcdef'
990 pull_request.target_ref = 'branch:target:abcdef1234567890'
993 pull_request.target_ref = 'branch:target:abcdef1234567890'
991 Session().add(pull_request)
994 Session().add(pull_request)
992 Session().commit()
995 Session().commit()
993
996
994 response = self.app.get(route_path(
997 response = self.app.get(route_path(
995 'pullrequest_show',
998 'pullrequest_show',
996 repo_name=pull_request.target_repo.scm_instance().name,
999 repo_name=pull_request.target_repo.scm_instance().name,
997 pull_request_id=pull_request.pull_request_id))
1000 pull_request_id=pull_request.pull_request_id))
998 assert response.status_int == 200
1001 assert response.status_int == 200
999 assert_response = AssertResponse(response)
1002 assert_response = AssertResponse(response)
1000
1003
1001 origin = assert_response.get_element('.pr-origininfo .tag')
1004 origin = assert_response.get_element('.pr-origininfo .tag')
1002 origin_children = origin.getchildren()
1005 origin_children = origin.getchildren()
1003 assert len(origin_children) == 1
1006 assert len(origin_children) == 1
1004 target = assert_response.get_element('.pr-targetinfo .tag')
1007 target = assert_response.get_element('.pr-targetinfo .tag')
1005 target_children = target.getchildren()
1008 target_children = target.getchildren()
1006 assert len(target_children) == 1
1009 assert len(target_children) == 1
1007
1010
1008 expected_origin_link = route_path(
1011 expected_origin_link = route_path(
1009 'repo_changelog',
1012 'repo_changelog',
1010 repo_name=pull_request.source_repo.scm_instance().name,
1013 repo_name=pull_request.source_repo.scm_instance().name,
1011 params=dict(branch='origin'))
1014 params=dict(branch='origin'))
1012 expected_target_link = route_path(
1015 expected_target_link = route_path(
1013 'repo_changelog',
1016 'repo_changelog',
1014 repo_name=pull_request.target_repo.scm_instance().name,
1017 repo_name=pull_request.target_repo.scm_instance().name,
1015 params=dict(branch='target'))
1018 params=dict(branch='target'))
1016 assert origin_children[0].attrib['href'] == expected_origin_link
1019 assert origin_children[0].attrib['href'] == expected_origin_link
1017 assert origin_children[0].text == 'branch: origin'
1020 assert origin_children[0].text == 'branch: origin'
1018 assert target_children[0].attrib['href'] == expected_target_link
1021 assert target_children[0].attrib['href'] == expected_target_link
1019 assert target_children[0].text == 'branch: target'
1022 assert target_children[0].text == 'branch: target'
1020
1023
1021 def test_bookmark_is_not_a_link(self, pr_util):
1024 def test_bookmark_is_not_a_link(self, pr_util):
1022 pull_request = pr_util.create_pull_request()
1025 pull_request = pr_util.create_pull_request()
1023 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1026 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1024 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1027 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1025 Session().add(pull_request)
1028 Session().add(pull_request)
1026 Session().commit()
1029 Session().commit()
1027
1030
1028 response = self.app.get(route_path(
1031 response = self.app.get(route_path(
1029 'pullrequest_show',
1032 'pullrequest_show',
1030 repo_name=pull_request.target_repo.scm_instance().name,
1033 repo_name=pull_request.target_repo.scm_instance().name,
1031 pull_request_id=pull_request.pull_request_id))
1034 pull_request_id=pull_request.pull_request_id))
1032 assert response.status_int == 200
1035 assert response.status_int == 200
1033 assert_response = AssertResponse(response)
1036 assert_response = AssertResponse(response)
1034
1037
1035 origin = assert_response.get_element('.pr-origininfo .tag')
1038 origin = assert_response.get_element('.pr-origininfo .tag')
1036 assert origin.text.strip() == 'bookmark: origin'
1039 assert origin.text.strip() == 'bookmark: origin'
1037 assert origin.getchildren() == []
1040 assert origin.getchildren() == []
1038
1041
1039 target = assert_response.get_element('.pr-targetinfo .tag')
1042 target = assert_response.get_element('.pr-targetinfo .tag')
1040 assert target.text.strip() == 'bookmark: target'
1043 assert target.text.strip() == 'bookmark: target'
1041 assert target.getchildren() == []
1044 assert target.getchildren() == []
1042
1045
1043 def test_tag_is_not_a_link(self, pr_util):
1046 def test_tag_is_not_a_link(self, pr_util):
1044 pull_request = pr_util.create_pull_request()
1047 pull_request = pr_util.create_pull_request()
1045 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1048 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1046 pull_request.target_ref = 'tag:target:abcdef1234567890'
1049 pull_request.target_ref = 'tag:target:abcdef1234567890'
1047 Session().add(pull_request)
1050 Session().add(pull_request)
1048 Session().commit()
1051 Session().commit()
1049
1052
1050 response = self.app.get(route_path(
1053 response = self.app.get(route_path(
1051 'pullrequest_show',
1054 'pullrequest_show',
1052 repo_name=pull_request.target_repo.scm_instance().name,
1055 repo_name=pull_request.target_repo.scm_instance().name,
1053 pull_request_id=pull_request.pull_request_id))
1056 pull_request_id=pull_request.pull_request_id))
1054 assert response.status_int == 200
1057 assert response.status_int == 200
1055 assert_response = AssertResponse(response)
1058 assert_response = AssertResponse(response)
1056
1059
1057 origin = assert_response.get_element('.pr-origininfo .tag')
1060 origin = assert_response.get_element('.pr-origininfo .tag')
1058 assert origin.text.strip() == 'tag: origin'
1061 assert origin.text.strip() == 'tag: origin'
1059 assert origin.getchildren() == []
1062 assert origin.getchildren() == []
1060
1063
1061 target = assert_response.get_element('.pr-targetinfo .tag')
1064 target = assert_response.get_element('.pr-targetinfo .tag')
1062 assert target.text.strip() == 'tag: target'
1065 assert target.text.strip() == 'tag: target'
1063 assert target.getchildren() == []
1066 assert target.getchildren() == []
1064
1067
1065 @pytest.mark.parametrize('mergeable', [True, False])
1068 @pytest.mark.parametrize('mergeable', [True, False])
1066 def test_shadow_repository_link(
1069 def test_shadow_repository_link(
1067 self, mergeable, pr_util, http_host_only_stub):
1070 self, mergeable, pr_util, http_host_only_stub):
1068 """
1071 """
1069 Check that the pull request summary page displays a link to the shadow
1072 Check that the pull request summary page displays a link to the shadow
1070 repository if the pull request is mergeable. If it is not mergeable
1073 repository if the pull request is mergeable. If it is not mergeable
1071 the link should not be displayed.
1074 the link should not be displayed.
1072 """
1075 """
1073 pull_request = pr_util.create_pull_request(
1076 pull_request = pr_util.create_pull_request(
1074 mergeable=mergeable, enable_notifications=False)
1077 mergeable=mergeable, enable_notifications=False)
1075 target_repo = pull_request.target_repo.scm_instance()
1078 target_repo = pull_request.target_repo.scm_instance()
1076 pr_id = pull_request.pull_request_id
1079 pr_id = pull_request.pull_request_id
1077 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1080 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1078 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1081 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1079
1082
1080 response = self.app.get(route_path(
1083 response = self.app.get(route_path(
1081 'pullrequest_show',
1084 'pullrequest_show',
1082 repo_name=target_repo.name,
1085 repo_name=target_repo.name,
1083 pull_request_id=pr_id))
1086 pull_request_id=pr_id))
1084
1087
1085 assertr = AssertResponse(response)
1088 assertr = AssertResponse(response)
1086 if mergeable:
1089 if mergeable:
1087 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1090 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1088 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1091 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1089 else:
1092 else:
1090 assertr.no_element_exists('.pr-mergeinfo')
1093 assertr.no_element_exists('.pr-mergeinfo')
1091
1094
1092
1095
1093 @pytest.mark.usefixtures('app')
1096 @pytest.mark.usefixtures('app')
1094 @pytest.mark.backends("git", "hg")
1097 @pytest.mark.backends("git", "hg")
1095 class TestPullrequestsControllerDelete(object):
1098 class TestPullrequestsControllerDelete(object):
1096 def test_pull_request_delete_button_permissions_admin(
1099 def test_pull_request_delete_button_permissions_admin(
1097 self, autologin_user, user_admin, pr_util):
1100 self, autologin_user, user_admin, pr_util):
1098 pull_request = pr_util.create_pull_request(
1101 pull_request = pr_util.create_pull_request(
1099 author=user_admin.username, enable_notifications=False)
1102 author=user_admin.username, enable_notifications=False)
1100
1103
1101 response = self.app.get(route_path(
1104 response = self.app.get(route_path(
1102 'pullrequest_show',
1105 'pullrequest_show',
1103 repo_name=pull_request.target_repo.scm_instance().name,
1106 repo_name=pull_request.target_repo.scm_instance().name,
1104 pull_request_id=pull_request.pull_request_id))
1107 pull_request_id=pull_request.pull_request_id))
1105
1108
1106 response.mustcontain('id="delete_pullrequest"')
1109 response.mustcontain('id="delete_pullrequest"')
1107 response.mustcontain('Confirm to delete this pull request')
1110 response.mustcontain('Confirm to delete this pull request')
1108
1111
1109 def test_pull_request_delete_button_permissions_owner(
1112 def test_pull_request_delete_button_permissions_owner(
1110 self, autologin_regular_user, user_regular, pr_util):
1113 self, autologin_regular_user, user_regular, pr_util):
1111 pull_request = pr_util.create_pull_request(
1114 pull_request = pr_util.create_pull_request(
1112 author=user_regular.username, enable_notifications=False)
1115 author=user_regular.username, enable_notifications=False)
1113
1116
1114 response = self.app.get(route_path(
1117 response = self.app.get(route_path(
1115 'pullrequest_show',
1118 'pullrequest_show',
1116 repo_name=pull_request.target_repo.scm_instance().name,
1119 repo_name=pull_request.target_repo.scm_instance().name,
1117 pull_request_id=pull_request.pull_request_id))
1120 pull_request_id=pull_request.pull_request_id))
1118
1121
1119 response.mustcontain('id="delete_pullrequest"')
1122 response.mustcontain('id="delete_pullrequest"')
1120 response.mustcontain('Confirm to delete this pull request')
1123 response.mustcontain('Confirm to delete this pull request')
1121
1124
1122 def test_pull_request_delete_button_permissions_forbidden(
1125 def test_pull_request_delete_button_permissions_forbidden(
1123 self, autologin_regular_user, user_regular, user_admin, pr_util):
1126 self, autologin_regular_user, user_regular, user_admin, pr_util):
1124 pull_request = pr_util.create_pull_request(
1127 pull_request = pr_util.create_pull_request(
1125 author=user_admin.username, enable_notifications=False)
1128 author=user_admin.username, enable_notifications=False)
1126
1129
1127 response = self.app.get(route_path(
1130 response = self.app.get(route_path(
1128 'pullrequest_show',
1131 'pullrequest_show',
1129 repo_name=pull_request.target_repo.scm_instance().name,
1132 repo_name=pull_request.target_repo.scm_instance().name,
1130 pull_request_id=pull_request.pull_request_id))
1133 pull_request_id=pull_request.pull_request_id))
1131 response.mustcontain(no=['id="delete_pullrequest"'])
1134 response.mustcontain(no=['id="delete_pullrequest"'])
1132 response.mustcontain(no=['Confirm to delete this pull request'])
1135 response.mustcontain(no=['Confirm to delete this pull request'])
1133
1136
1134 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1137 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1135 self, autologin_regular_user, user_regular, user_admin, pr_util,
1138 self, autologin_regular_user, user_regular, user_admin, pr_util,
1136 user_util):
1139 user_util):
1137
1140
1138 pull_request = pr_util.create_pull_request(
1141 pull_request = pr_util.create_pull_request(
1139 author=user_admin.username, enable_notifications=False)
1142 author=user_admin.username, enable_notifications=False)
1140
1143
1141 user_util.grant_user_permission_to_repo(
1144 user_util.grant_user_permission_to_repo(
1142 pull_request.target_repo, user_regular,
1145 pull_request.target_repo, user_regular,
1143 'repository.write')
1146 'repository.write')
1144
1147
1145 response = self.app.get(route_path(
1148 response = self.app.get(route_path(
1146 'pullrequest_show',
1149 'pullrequest_show',
1147 repo_name=pull_request.target_repo.scm_instance().name,
1150 repo_name=pull_request.target_repo.scm_instance().name,
1148 pull_request_id=pull_request.pull_request_id))
1151 pull_request_id=pull_request.pull_request_id))
1149
1152
1150 response.mustcontain('id="open_edit_pullrequest"')
1153 response.mustcontain('id="open_edit_pullrequest"')
1151 response.mustcontain('id="delete_pullrequest"')
1154 response.mustcontain('id="delete_pullrequest"')
1152 response.mustcontain(no=['Confirm to delete this pull request'])
1155 response.mustcontain(no=['Confirm to delete this pull request'])
1153
1156
1154 def test_delete_comment_returns_404_if_comment_does_not_exist(
1157 def test_delete_comment_returns_404_if_comment_does_not_exist(
1155 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1158 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1156
1159
1157 pull_request = pr_util.create_pull_request(
1160 pull_request = pr_util.create_pull_request(
1158 author=user_admin.username, enable_notifications=False)
1161 author=user_admin.username, enable_notifications=False)
1159
1162
1160 self.app.post(
1163 self.app.post(
1161 route_path(
1164 route_path(
1162 'pullrequest_comment_delete',
1165 'pullrequest_comment_delete',
1163 repo_name=pull_request.target_repo.scm_instance().name,
1166 repo_name=pull_request.target_repo.scm_instance().name,
1164 pull_request_id=pull_request.pull_request_id,
1167 pull_request_id=pull_request.pull_request_id,
1165 comment_id=1024404),
1168 comment_id=1024404),
1166 extra_environ=xhr_header,
1169 extra_environ=xhr_header,
1167 params={'csrf_token': csrf_token},
1170 params={'csrf_token': csrf_token},
1168 status=404
1171 status=404
1169 )
1172 )
1170
1173
1171 def test_delete_comment(
1174 def test_delete_comment(
1172 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1175 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1173
1176
1174 pull_request = pr_util.create_pull_request(
1177 pull_request = pr_util.create_pull_request(
1175 author=user_admin.username, enable_notifications=False)
1178 author=user_admin.username, enable_notifications=False)
1176 comment = pr_util.create_comment()
1179 comment = pr_util.create_comment()
1177 comment_id = comment.comment_id
1180 comment_id = comment.comment_id
1178
1181
1179 response = self.app.post(
1182 response = self.app.post(
1180 route_path(
1183 route_path(
1181 'pullrequest_comment_delete',
1184 'pullrequest_comment_delete',
1182 repo_name=pull_request.target_repo.scm_instance().name,
1185 repo_name=pull_request.target_repo.scm_instance().name,
1183 pull_request_id=pull_request.pull_request_id,
1186 pull_request_id=pull_request.pull_request_id,
1184 comment_id=comment_id),
1187 comment_id=comment_id),
1185 extra_environ=xhr_header,
1188 extra_environ=xhr_header,
1186 params={'csrf_token': csrf_token},
1189 params={'csrf_token': csrf_token},
1187 status=200
1190 status=200
1188 )
1191 )
1189 assert response.body == 'true'
1192 assert response.body == 'true'
1190
1193
1191
1194
1192 def assert_pull_request_status(pull_request, expected_status):
1195 def assert_pull_request_status(pull_request, expected_status):
1193 status = ChangesetStatusModel().calculated_review_status(
1196 status = ChangesetStatusModel().calculated_review_status(
1194 pull_request=pull_request)
1197 pull_request=pull_request)
1195 assert status == expected_status
1198 assert status == expected_status
1196
1199
1197
1200
1198 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1201 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1199 @pytest.mark.usefixtures("autologin_user")
1202 @pytest.mark.usefixtures("autologin_user")
1200 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1203 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1201 response = app.get(
1204 response = app.get(
1202 route_path(route, repo_name=backend_svn.repo_name), status=404)
1205 route_path(route, repo_name=backend_svn.repo_name), status=404)
1203
1206
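
The update scenarios above all exercise the same request: a POST to the 'pullrequest_update' route with 'update_commits' and the CSRF token, followed by assertions against the reloaded PullRequest. A minimal sketch of that shared flow is shown here, assuming the route_path helper and the csrf_token fixture used throughout these tests; the helper name post_update_commits is hypothetical and not part of the suite.

    def post_update_commits(app, target_repo_name, pull_request_id, csrf_token):
        # Same parameters the tests above send to trigger a pull request update.
        return app.post(
            route_path('pullrequest_update',
                       repo_name=target_repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token},
            status=200)

Each test then reloads the pull request with PullRequest.get(pull_request_id) and asserts on revisions and target_ref, as shown above.
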
@@ -1,1316 +1,1324 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
34
34
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
36 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.base import vcs_operation_context
37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 NotAnonymous, CSRFRequired)
41 NotAnonymous, CSRFRequired)
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
45 RepositoryRequirementError, EmptyRepositoryError)
45 RepositoryRequirementError, EmptyRepositoryError)
46 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.comment import CommentsModel
48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
49 ChangesetComment, ChangesetStatus, Repository)
49 ChangesetComment, ChangesetStatus, Repository)
50 from rhodecode.model.forms import PullRequestForm
50 from rhodecode.model.forms import PullRequestForm
51 from rhodecode.model.meta import Session
51 from rhodecode.model.meta import Session
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
53 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.scm import ScmModel
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
59
59
60 def load_default_context(self):
60 def load_default_context(self):
61 c = self._get_local_tmpl_context(include_app_defaults=True)
61 c = self._get_local_tmpl_context(include_app_defaults=True)
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
64
64 # backward compat., we use for OLD PRs a plain renderer
65 c.renderer = 'plain'
65 return c
66 return c
66
67
67 def _get_pull_requests_list(
68 def _get_pull_requests_list(
68 self, repo_name, source, filter_type, opened_by, statuses):
69 self, repo_name, source, filter_type, opened_by, statuses):
69
70
70 draw, start, limit = self._extract_chunk(self.request)
71 draw, start, limit = self._extract_chunk(self.request)
71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 _render = self.request.get_partial_renderer(
73 _render = self.request.get_partial_renderer(
73 'rhodecode:templates/data_table/_dt_elements.mako')
74 'rhodecode:templates/data_table/_dt_elements.mako')
74
75
75 # pagination
76 # pagination
76
77
77 if filter_type == 'awaiting_review':
78 if filter_type == 'awaiting_review':
78 pull_requests = PullRequestModel().get_awaiting_review(
79 pull_requests = PullRequestModel().get_awaiting_review(
79 repo_name, source=source, opened_by=opened_by,
80 repo_name, source=source, opened_by=opened_by,
80 statuses=statuses, offset=start, length=limit,
81 statuses=statuses, offset=start, length=limit,
81 order_by=order_by, order_dir=order_dir)
82 order_by=order_by, order_dir=order_dir)
82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 repo_name, source=source, statuses=statuses,
84 repo_name, source=source, statuses=statuses,
84 opened_by=opened_by)
85 opened_by=opened_by)
85 elif filter_type == 'awaiting_my_review':
86 elif filter_type == 'awaiting_my_review':
86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 pull_requests = PullRequestModel().get_awaiting_my_review(
87 repo_name, source=source, opened_by=opened_by,
88 repo_name, source=source, opened_by=opened_by,
88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 offset=start, length=limit, order_by=order_by,
90 offset=start, length=limit, order_by=order_by,
90 order_dir=order_dir)
91 order_dir=order_dir)
91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
93 repo_name, source=source, user_id=self._rhodecode_user.user_id,
93 statuses=statuses, opened_by=opened_by)
94 statuses=statuses, opened_by=opened_by)
94 else:
95 else:
95 pull_requests = PullRequestModel().get_all(
96 pull_requests = PullRequestModel().get_all(
96 repo_name, source=source, opened_by=opened_by,
97 repo_name, source=source, opened_by=opened_by,
97 statuses=statuses, offset=start, length=limit,
98 statuses=statuses, offset=start, length=limit,
98 order_by=order_by, order_dir=order_dir)
99 order_by=order_by, order_dir=order_dir)
99 pull_requests_total_count = PullRequestModel().count_all(
100 pull_requests_total_count = PullRequestModel().count_all(
100 repo_name, source=source, statuses=statuses,
101 repo_name, source=source, statuses=statuses,
101 opened_by=opened_by)
102 opened_by=opened_by)
102
103
103 data = []
104 data = []
104 comments_model = CommentsModel()
105 comments_model = CommentsModel()
105 for pr in pull_requests:
106 for pr in pull_requests:
106 comments = comments_model.get_all_comments(
107 comments = comments_model.get_all_comments(
107 self.db_repo.repo_id, pull_request=pr)
108 self.db_repo.repo_id, pull_request=pr)
108
109
109 data.append({
110 data.append({
110 'name': _render('pullrequest_name',
111 'name': _render('pullrequest_name',
111 pr.pull_request_id, pr.target_repo.repo_name),
112 pr.pull_request_id, pr.target_repo.repo_name),
112 'name_raw': pr.pull_request_id,
113 'name_raw': pr.pull_request_id,
113 'status': _render('pullrequest_status',
114 'status': _render('pullrequest_status',
114 pr.calculated_review_status()),
115 pr.calculated_review_status()),
115 'title': _render(
116 'title': _render(
116 'pullrequest_title', pr.title, pr.description),
117 'pullrequest_title', pr.title, pr.description),
117 'description': h.escape(pr.description),
118 'description': h.escape(pr.description),
118 'updated_on': _render('pullrequest_updated_on',
119 'updated_on': _render('pullrequest_updated_on',
119 h.datetime_to_time(pr.updated_on)),
120 h.datetime_to_time(pr.updated_on)),
120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 'created_on': _render('pullrequest_updated_on',
122 'created_on': _render('pullrequest_updated_on',
122 h.datetime_to_time(pr.created_on)),
123 h.datetime_to_time(pr.created_on)),
123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 'created_on_raw': h.datetime_to_time(pr.created_on),
124 'author': _render('pullrequest_author',
125 'author': _render('pullrequest_author',
125 pr.author.full_contact, ),
126 pr.author.full_contact, ),
126 'author_raw': pr.author.full_name,
127 'author_raw': pr.author.full_name,
127 'comments': _render('pullrequest_comments', len(comments)),
128 'comments': _render('pullrequest_comments', len(comments)),
128 'comments_raw': len(comments),
129 'comments_raw': len(comments),
129 'closed': pr.is_closed(),
130 'closed': pr.is_closed(),
130 })
131 })
131
132
132 data = ({
133 data = ({
133 'draw': draw,
134 'draw': draw,
134 'data': data,
135 'data': data,
135 'recordsTotal': pull_requests_total_count,
136 'recordsTotal': pull_requests_total_count,
136 'recordsFiltered': pull_requests_total_count,
137 'recordsFiltered': pull_requests_total_count,
137 })
138 })
138 return data
139 return data
139
140
140 @LoginRequired()
141 @LoginRequired()
141 @HasRepoPermissionAnyDecorator(
142 @HasRepoPermissionAnyDecorator(
142 'repository.read', 'repository.write', 'repository.admin')
143 'repository.read', 'repository.write', 'repository.admin')
143 @view_config(
144 @view_config(
144 route_name='pullrequest_show_all', request_method='GET',
145 route_name='pullrequest_show_all', request_method='GET',
145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 def pull_request_list(self):
147 def pull_request_list(self):
147 c = self.load_default_context()
148 c = self.load_default_context()
148
149
149 req_get = self.request.GET
150 req_get = self.request.GET
150 c.source = str2bool(req_get.get('source'))
151 c.source = str2bool(req_get.get('source'))
151 c.closed = str2bool(req_get.get('closed'))
152 c.closed = str2bool(req_get.get('closed'))
152 c.my = str2bool(req_get.get('my'))
153 c.my = str2bool(req_get.get('my'))
153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155
156
156 c.active = 'open'
157 c.active = 'open'
157 if c.my:
158 if c.my:
158 c.active = 'my'
159 c.active = 'my'
159 if c.closed:
160 if c.closed:
160 c.active = 'closed'
161 c.active = 'closed'
161 if c.awaiting_review and not c.source:
162 if c.awaiting_review and not c.source:
162 c.active = 'awaiting'
163 c.active = 'awaiting'
163 if c.source and not c.awaiting_review:
164 if c.source and not c.awaiting_review:
164 c.active = 'source'
165 c.active = 'source'
165 if c.awaiting_my_review:
166 if c.awaiting_my_review:
166 c.active = 'awaiting_my'
167 c.active = 'awaiting_my'
167
168
168 return self._get_template_context(c)
169 return self._get_template_context(c)
169
170
170 @LoginRequired()
171 @LoginRequired()
171 @HasRepoPermissionAnyDecorator(
172 @HasRepoPermissionAnyDecorator(
172 'repository.read', 'repository.write', 'repository.admin')
173 'repository.read', 'repository.write', 'repository.admin')
173 @view_config(
174 @view_config(
174 route_name='pullrequest_show_all_data', request_method='GET',
175 route_name='pullrequest_show_all_data', request_method='GET',
175 renderer='json_ext', xhr=True)
176 renderer='json_ext', xhr=True)
176 def pull_request_list_data(self):
177 def pull_request_list_data(self):
177 self.load_default_context()
178 self.load_default_context()
178
179
179 # additional filters
180 # additional filters
180 req_get = self.request.GET
181 req_get = self.request.GET
181 source = str2bool(req_get.get('source'))
182 source = str2bool(req_get.get('source'))
182 closed = str2bool(req_get.get('closed'))
183 closed = str2bool(req_get.get('closed'))
183 my = str2bool(req_get.get('my'))
184 my = str2bool(req_get.get('my'))
184 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186
187
187 filter_type = 'awaiting_review' if awaiting_review \
188 filter_type = 'awaiting_review' if awaiting_review \
188 else 'awaiting_my_review' if awaiting_my_review \
189 else 'awaiting_my_review' if awaiting_my_review \
189 else None
190 else None
190
191
191 opened_by = None
192 opened_by = None
192 if my:
193 if my:
193 opened_by = [self._rhodecode_user.user_id]
194 opened_by = [self._rhodecode_user.user_id]
194
195
195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 if closed:
197 if closed:
197 statuses = [PullRequest.STATUS_CLOSED]
198 statuses = [PullRequest.STATUS_CLOSED]
198
199
199 data = self._get_pull_requests_list(
200 data = self._get_pull_requests_list(
200 repo_name=self.db_repo_name, source=source,
201 repo_name=self.db_repo_name, source=source,
201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202
203
203 return data
204 return data
204
205
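
A spelled-out sketch of the filter_type selection performed in pull_request_list_data above; it is purely illustrative of the chained conditional's behavior, not a proposed change to the view.

    # Equivalent to: 'awaiting_review' if awaiting_review
    #                else 'awaiting_my_review' if awaiting_my_review else None
    if awaiting_review:
        filter_type = 'awaiting_review'
    elif awaiting_my_review:
        filter_type = 'awaiting_my_review'
    else:
        filter_type = None
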
205 def _is_diff_cache_enabled(self, target_repo):
206 def _is_diff_cache_enabled(self, target_repo):
206 caching_enabled = self._get_general_setting(
207 caching_enabled = self._get_general_setting(
207 target_repo, 'rhodecode_diff_cache')
208 target_repo, 'rhodecode_diff_cache')
208 log.debug('Diff caching enabled: %s', caching_enabled)
209 log.debug('Diff caching enabled: %s', caching_enabled)
209 return caching_enabled
210 return caching_enabled
210
211
211 def _get_diffset(self, source_repo_name, source_repo,
212 def _get_diffset(self, source_repo_name, source_repo,
212 source_ref_id, target_ref_id,
213 source_ref_id, target_ref_id,
213 target_commit, source_commit, diff_limit, file_limit,
214 target_commit, source_commit, diff_limit, file_limit,
214 fulldiff):
215 fulldiff):
215
216
216 vcs_diff = PullRequestModel().get_diff(
217 vcs_diff = PullRequestModel().get_diff(
217 source_repo, source_ref_id, target_ref_id)
218 source_repo, source_ref_id, target_ref_id)
218
219
219 diff_processor = diffs.DiffProcessor(
220 diff_processor = diffs.DiffProcessor(
220 vcs_diff, format='newdiff', diff_limit=diff_limit,
221 vcs_diff, format='newdiff', diff_limit=diff_limit,
221 file_limit=file_limit, show_full_diff=fulldiff)
222 file_limit=file_limit, show_full_diff=fulldiff)
222
223
223 _parsed = diff_processor.prepare()
224 _parsed = diff_processor.prepare()
224
225
225 diffset = codeblocks.DiffSet(
226 diffset = codeblocks.DiffSet(
226 repo_name=self.db_repo_name,
227 repo_name=self.db_repo_name,
227 source_repo_name=source_repo_name,
228 source_repo_name=source_repo_name,
228 source_node_getter=codeblocks.diffset_node_getter(target_commit),
229 source_node_getter=codeblocks.diffset_node_getter(target_commit),
229 target_node_getter=codeblocks.diffset_node_getter(source_commit),
230 target_node_getter=codeblocks.diffset_node_getter(source_commit),
230 )
231 )
231 diffset = self.path_filter.render_patchset_filtered(
232 diffset = self.path_filter.render_patchset_filtered(
232 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
233 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
233
234
234 return diffset
235 return diffset
235
236
236 @LoginRequired()
237 @LoginRequired()
237 @HasRepoPermissionAnyDecorator(
238 @HasRepoPermissionAnyDecorator(
238 'repository.read', 'repository.write', 'repository.admin')
239 'repository.read', 'repository.write', 'repository.admin')
239 @view_config(
240 @view_config(
240 route_name='pullrequest_show', request_method='GET',
241 route_name='pullrequest_show', request_method='GET',
241 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
242 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
242 def pull_request_show(self):
243 def pull_request_show(self):
243 pull_request_id = self.request.matchdict['pull_request_id']
244 pull_request_id = self.request.matchdict['pull_request_id']
244
245
245 c = self.load_default_context()
246 c = self.load_default_context()
246
247
247 version = self.request.GET.get('version')
248 version = self.request.GET.get('version')
248 from_version = self.request.GET.get('from_version') or version
249 from_version = self.request.GET.get('from_version') or version
249 merge_checks = self.request.GET.get('merge_checks')
250 merge_checks = self.request.GET.get('merge_checks')
250 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
251 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
251 force_refresh = str2bool(self.request.GET.get('force_refresh'))
252 force_refresh = str2bool(self.request.GET.get('force_refresh'))
252
253
253 (pull_request_latest,
254 (pull_request_latest,
254 pull_request_at_ver,
255 pull_request_at_ver,
255 pull_request_display_obj,
256 pull_request_display_obj,
256 at_version) = PullRequestModel().get_pr_version(
257 at_version) = PullRequestModel().get_pr_version(
257 pull_request_id, version=version)
258 pull_request_id, version=version)
258 pr_closed = pull_request_latest.is_closed()
259 pr_closed = pull_request_latest.is_closed()
259
260
260 if pr_closed and (version or from_version):
261 if pr_closed and (version or from_version):
261 # do not allow browsing versions of a closed pull request
262 # do not allow browsing versions of a closed pull request
262 raise HTTPFound(h.route_path(
263 raise HTTPFound(h.route_path(
263 'pullrequest_show', repo_name=self.db_repo_name,
264 'pullrequest_show', repo_name=self.db_repo_name,
264 pull_request_id=pull_request_id))
265 pull_request_id=pull_request_id))
265
266
266 versions = pull_request_display_obj.versions()
267 versions = pull_request_display_obj.versions()
267
268
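# at_version selects which saved PR version is displayed; at_version_num
# stays None for 'latest', and at_version_pos is its index in the version list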
268 c.at_version = at_version
269 c.at_version = at_version
269 c.at_version_num = (at_version
270 c.at_version_num = (at_version
270 if at_version and at_version != 'latest'
271 if at_version and at_version != 'latest'
271 else None)
272 else None)
272 c.at_version_pos = ChangesetComment.get_index_from_version(
273 c.at_version_pos = ChangesetComment.get_index_from_version(
273 c.at_version_num, versions)
274 c.at_version_num, versions)
274
275
275 (prev_pull_request_latest,
276 (prev_pull_request_latest,
276 prev_pull_request_at_ver,
277 prev_pull_request_at_ver,
277 prev_pull_request_display_obj,
278 prev_pull_request_display_obj,
278 prev_at_version) = PullRequestModel().get_pr_version(
279 prev_at_version) = PullRequestModel().get_pr_version(
279 pull_request_id, version=from_version)
280 pull_request_id, version=from_version)
280
281
281 c.from_version = prev_at_version
282 c.from_version = prev_at_version
282 c.from_version_num = (prev_at_version
283 c.from_version_num = (prev_at_version
283 if prev_at_version and prev_at_version != 'latest'
284 if prev_at_version and prev_at_version != 'latest'
284 else None)
285 else None)
285 c.from_version_pos = ChangesetComment.get_index_from_version(
286 c.from_version_pos = ChangesetComment.get_index_from_version(
286 c.from_version_num, versions)
287 c.from_version_num, versions)
287
288
288 # define if we're in COMPARE mode or VIEW at version mode
289 # define if we're in COMPARE mode or VIEW at version mode
289 compare = at_version != prev_at_version
290 compare = at_version != prev_at_version
290
291
291 # the repo this pull request was opened against,
292 # the repo this pull request was opened against,
292 # i.e. the target_repo, must match the current repository
293 # i.e. the target_repo, must match the current repository
293 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
294 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
294 raise HTTPNotFound()
295 raise HTTPNotFound()
295
296
296 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
297 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
297 pull_request_at_ver)
298 pull_request_at_ver)
298
299
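# the description renderer stored with this PR version (added in this change)
# overrides the default c.renderer when it is set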
299 c.pull_request = pull_request_display_obj
300 c.pull_request = pull_request_display_obj
301 c.renderer = pull_request_at_ver.description_renderer or c.renderer
300 c.pull_request_latest = pull_request_latest
302 c.pull_request_latest = pull_request_latest
301
303
302 if compare or (at_version and not at_version == 'latest'):
304 if compare or (at_version and not at_version == 'latest'):
303 c.allowed_to_change_status = False
305 c.allowed_to_change_status = False
304 c.allowed_to_update = False
306 c.allowed_to_update = False
305 c.allowed_to_merge = False
307 c.allowed_to_merge = False
306 c.allowed_to_delete = False
308 c.allowed_to_delete = False
307 c.allowed_to_comment = False
309 c.allowed_to_comment = False
308 c.allowed_to_close = False
310 c.allowed_to_close = False
309 else:
311 else:
310 can_change_status = PullRequestModel().check_user_change_status(
312 can_change_status = PullRequestModel().check_user_change_status(
311 pull_request_at_ver, self._rhodecode_user)
313 pull_request_at_ver, self._rhodecode_user)
312 c.allowed_to_change_status = can_change_status and not pr_closed
314 c.allowed_to_change_status = can_change_status and not pr_closed
313
315
314 c.allowed_to_update = PullRequestModel().check_user_update(
316 c.allowed_to_update = PullRequestModel().check_user_update(
315 pull_request_latest, self._rhodecode_user) and not pr_closed
317 pull_request_latest, self._rhodecode_user) and not pr_closed
316 c.allowed_to_merge = PullRequestModel().check_user_merge(
318 c.allowed_to_merge = PullRequestModel().check_user_merge(
317 pull_request_latest, self._rhodecode_user) and not pr_closed
319 pull_request_latest, self._rhodecode_user) and not pr_closed
318 c.allowed_to_delete = PullRequestModel().check_user_delete(
320 c.allowed_to_delete = PullRequestModel().check_user_delete(
319 pull_request_latest, self._rhodecode_user) and not pr_closed
321 pull_request_latest, self._rhodecode_user) and not pr_closed
320 c.allowed_to_comment = not pr_closed
322 c.allowed_to_comment = not pr_closed
321 c.allowed_to_close = c.allowed_to_merge and not pr_closed
323 c.allowed_to_close = c.allowed_to_merge and not pr_closed
322
324
323 c.forbid_adding_reviewers = False
325 c.forbid_adding_reviewers = False
324 c.forbid_author_to_review = False
326 c.forbid_author_to_review = False
325 c.forbid_commit_author_to_review = False
327 c.forbid_commit_author_to_review = False
326
328
327 if pull_request_latest.reviewer_data and \
329 if pull_request_latest.reviewer_data and \
328 'rules' in pull_request_latest.reviewer_data:
330 'rules' in pull_request_latest.reviewer_data:
329 rules = pull_request_latest.reviewer_data['rules'] or {}
331 rules = pull_request_latest.reviewer_data['rules'] or {}
330 try:
332 try:
331 c.forbid_adding_reviewers = rules.get(
333 c.forbid_adding_reviewers = rules.get(
332 'forbid_adding_reviewers')
334 'forbid_adding_reviewers')
333 c.forbid_author_to_review = rules.get(
335 c.forbid_author_to_review = rules.get(
334 'forbid_author_to_review')
336 'forbid_author_to_review')
335 c.forbid_commit_author_to_review = rules.get(
337 c.forbid_commit_author_to_review = rules.get(
336 'forbid_commit_author_to_review')
338 'forbid_commit_author_to_review')
337 except Exception:
339 except Exception:
338 pass
340 pass
339
341
340 # check merge capabilities
342 # check merge capabilities
341 _merge_check = MergeCheck.validate(
343 _merge_check = MergeCheck.validate(
342 pull_request_latest, user=self._rhodecode_user,
344 pull_request_latest, user=self._rhodecode_user,
343 translator=self.request.translate,
345 translator=self.request.translate,
344 force_shadow_repo_refresh=force_refresh)
346 force_shadow_repo_refresh=force_refresh)
345 c.pr_merge_errors = _merge_check.error_details
347 c.pr_merge_errors = _merge_check.error_details
346 c.pr_merge_possible = not _merge_check.failed
348 c.pr_merge_possible = not _merge_check.failed
347 c.pr_merge_message = _merge_check.merge_msg
349 c.pr_merge_message = _merge_check.merge_msg
348
350
349 c.pr_merge_info = MergeCheck.get_merge_conditions(
351 c.pr_merge_info = MergeCheck.get_merge_conditions(
350 pull_request_latest, translator=self.request.translate)
352 pull_request_latest, translator=self.request.translate)
351
353
352 c.pull_request_review_status = _merge_check.review_status
354 c.pull_request_review_status = _merge_check.review_status
353 if merge_checks:
355 if merge_checks:
354 self.request.override_renderer = \
356 self.request.override_renderer = \
355 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
357 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
356 return self._get_template_context(c)
358 return self._get_template_context(c)
357
359
358 comments_model = CommentsModel()
360 comments_model = CommentsModel()
359
361
360 # reviewers and statuses
362 # reviewers and statuses
361 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
363 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
362 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
364 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
363
365
364 # GENERAL COMMENTS with versions #
366 # GENERAL COMMENTS with versions #
365 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
367 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
366 q = q.order_by(ChangesetComment.comment_id.asc())
368 q = q.order_by(ChangesetComment.comment_id.asc())
367 general_comments = q
369 general_comments = q
368
370
369 # pick comments we want to render at current version
371 # pick comments we want to render at current version
370 c.comment_versions = comments_model.aggregate_comments(
372 c.comment_versions = comments_model.aggregate_comments(
371 general_comments, versions, c.at_version_num)
373 general_comments, versions, c.at_version_num)
372 c.comments = c.comment_versions[c.at_version_num]['until']
374 c.comments = c.comment_versions[c.at_version_num]['until']
373
375
374 # INLINE COMMENTS with versions #
376 # INLINE COMMENTS with versions #
375 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
377 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
376 q = q.order_by(ChangesetComment.comment_id.asc())
378 q = q.order_by(ChangesetComment.comment_id.asc())
377 inline_comments = q
379 inline_comments = q
378
380
379 c.inline_versions = comments_model.aggregate_comments(
381 c.inline_versions = comments_model.aggregate_comments(
380 inline_comments, versions, c.at_version_num, inline=True)
382 inline_comments, versions, c.at_version_num, inline=True)
381
383
382 # inject latest version
384 # inject latest version
383 latest_ver = PullRequest.get_pr_display_object(
385 latest_ver = PullRequest.get_pr_display_object(
384 pull_request_latest, pull_request_latest)
386 pull_request_latest, pull_request_latest)
385
387
386 c.versions = versions + [latest_ver]
388 c.versions = versions + [latest_ver]
387
389
388 # if a specific version is requested, do not show comments
390 # if a specific version is requested, do not show comments
389 # made after that version
391 # made after that version
390 display_inline_comments = collections.defaultdict(
392 display_inline_comments = collections.defaultdict(
391 lambda: collections.defaultdict(list))
393 lambda: collections.defaultdict(list))
392 for co in inline_comments:
394 for co in inline_comments:
393 if c.at_version_num:
395 if c.at_version_num:
394 # pick comments made up to (and including) the given version, so we
396 # pick comments made up to (and including) the given version, so we
395 # don't render comments that belong to a later version
397 # don't render comments that belong to a later version
396 should_render = co.pull_request_version_id and \
398 should_render = co.pull_request_version_id and \
397 co.pull_request_version_id <= c.at_version_num
399 co.pull_request_version_id <= c.at_version_num
398 else:
400 else:
399 # showing all, for 'latest'
401 # showing all, for 'latest'
400 should_render = True
402 should_render = True
401
403
402 if should_render:
404 if should_render:
403 display_inline_comments[co.f_path][co.line_no].append(co)
405 display_inline_comments[co.f_path][co.line_no].append(co)
404
406
405 # load diff data into the template context; in compare mode the
407 # load diff data into the template context; in compare mode the
406 # diff is calculated from the changes between PR versions
408 # diff is calculated from the changes between PR versions
407
409
408 source_repo = pull_request_at_ver.source_repo
410 source_repo = pull_request_at_ver.source_repo
409 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
411 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
410
412
411 target_repo = pull_request_at_ver.target_repo
413 target_repo = pull_request_at_ver.target_repo
412 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
414 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
413
415
414 if compare:
416 if compare:
415 # in compare switch the diff base to latest commit from prev version
417 # in compare switch the diff base to latest commit from prev version
416 target_ref_id = prev_pull_request_display_obj.revisions[0]
418 target_ref_id = prev_pull_request_display_obj.revisions[0]
417
419
418 # even when the PR was opened against a bookmark/branch/tag, we always
420 # even when the PR was opened against a bookmark/branch/tag, we always
419 # pin to a rev so the diff does not change after the bookmark or branch moves
421 # pin to a rev so the diff does not change after the bookmark or branch moves
420 c.source_ref_type = 'rev'
422 c.source_ref_type = 'rev'
421 c.source_ref = source_ref_id
423 c.source_ref = source_ref_id
422
424
423 c.target_ref_type = 'rev'
425 c.target_ref_type = 'rev'
424 c.target_ref = target_ref_id
426 c.target_ref = target_ref_id
425
427
426 c.source_repo = source_repo
428 c.source_repo = source_repo
427 c.target_repo = target_repo
429 c.target_repo = target_repo
428
430
429 c.commit_ranges = []
431 c.commit_ranges = []
430 source_commit = EmptyCommit()
432 source_commit = EmptyCommit()
431 target_commit = EmptyCommit()
433 target_commit = EmptyCommit()
432 c.missing_requirements = False
434 c.missing_requirements = False
433
435
434 source_scm = source_repo.scm_instance()
436 source_scm = source_repo.scm_instance()
435 target_scm = target_repo.scm_instance()
437 target_scm = target_repo.scm_instance()
436
438
437 shadow_scm = None
439 shadow_scm = None
438 try:
440 try:
439 shadow_scm = pull_request_latest.get_shadow_repo()
441 shadow_scm = pull_request_latest.get_shadow_repo()
440 except Exception:
442 except Exception:
441 log.debug('Failed to get shadow repo', exc_info=True)
443 log.debug('Failed to get shadow repo', exc_info=True)
442 # prefer the existing source_repo; fall back to the shadow
444 # prefer the existing source_repo; fall back to the shadow
443 # repo if we managed to obtain one
445 # repo if we managed to obtain one
444 commits_source_repo = source_scm or shadow_scm
446 commits_source_repo = source_scm or shadow_scm
445
447
446 c.commits_source_repo = commits_source_repo
448 c.commits_source_repo = commits_source_repo
447 c.ancestor = None # set it to None, to hide it from PR view
449 c.ancestor = None # set it to None, to hide it from PR view
448
450
449 # an empty version means 'latest'; normalize it so the same
451 # an empty version means 'latest'; normalize it so the same
450 # diff is not cached twice
452 # diff is not cached twice
451 version_normalized = version or 'latest'
453 version_normalized = version or 'latest'
452 from_version_normalized = from_version or 'latest'
454 from_version_normalized = from_version or 'latest'
453
455
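# the cache entry is keyed on the PR id, both normalized versions, both ref ids
# and the fulldiff flag, so changing any of them produces a separate cache file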
454 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
456 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
455 target_repo)
457 target_repo)
456 cache_file_path = diff_cache_exist(
458 cache_file_path = diff_cache_exist(
457 cache_path, 'pull_request', pull_request_id, version_normalized,
459 cache_path, 'pull_request', pull_request_id, version_normalized,
458 from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
460 from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
459
461
460 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
462 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
461 force_recache = str2bool(self.request.GET.get('force_recache'))
463 force_recache = str2bool(self.request.GET.get('force_recache'))
462
464
463 cached_diff = None
465 cached_diff = None
464 if caching_enabled:
466 if caching_enabled:
465 cached_diff = load_cached_diff(cache_file_path)
467 cached_diff = load_cached_diff(cache_file_path)
466
468
467 has_proper_commit_cache = (
469 has_proper_commit_cache = (
468 cached_diff and cached_diff.get('commits')
470 cached_diff and cached_diff.get('commits')
469 and len(cached_diff.get('commits', [])) == 5
471 and len(cached_diff.get('commits', [])) == 5
470 and cached_diff.get('commits')[0]
472 and cached_diff.get('commits')[0]
471 and cached_diff.get('commits')[3])
473 and cached_diff.get('commits')[3])
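# in both branches below, the chained assignment keeps the full 5-tuple in
# diff_commit_cache (re-used when saving the diff cache) while also unpacking it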
472 if not force_recache and has_proper_commit_cache:
474 if not force_recache and has_proper_commit_cache:
473 diff_commit_cache = \
475 diff_commit_cache = \
474 (ancestor_commit, commit_cache, missing_requirements,
476 (ancestor_commit, commit_cache, missing_requirements,
475 source_commit, target_commit) = cached_diff['commits']
477 source_commit, target_commit) = cached_diff['commits']
476 else:
478 else:
477 diff_commit_cache = \
479 diff_commit_cache = \
478 (ancestor_commit, commit_cache, missing_requirements,
480 (ancestor_commit, commit_cache, missing_requirements,
479 source_commit, target_commit) = self.get_commits(
481 source_commit, target_commit) = self.get_commits(
480 commits_source_repo,
482 commits_source_repo,
481 pull_request_at_ver,
483 pull_request_at_ver,
482 source_commit,
484 source_commit,
483 source_ref_id,
485 source_ref_id,
484 source_scm,
486 source_scm,
485 target_commit,
487 target_commit,
486 target_ref_id,
488 target_ref_id,
487 target_scm)
489 target_scm)
488
490
489 # register our commit range
491 # register our commit range
490 for comm in commit_cache.values():
492 for comm in commit_cache.values():
491 c.commit_ranges.append(comm)
493 c.commit_ranges.append(comm)
492
494
493 c.missing_requirements = missing_requirements
495 c.missing_requirements = missing_requirements
494 c.ancestor_commit = ancestor_commit
496 c.ancestor_commit = ancestor_commit
495 c.statuses = source_repo.statuses(
497 c.statuses = source_repo.statuses(
496 [x.raw_id for x in c.commit_ranges])
498 [x.raw_id for x in c.commit_ranges])
497
499
498 # auto-collapse the commit list if it exceeds the limit
500 # auto-collapse the commit list if it exceeds the limit
499 collapse_limit = diffs.DiffProcessor._collapse_commits_over
501 collapse_limit = diffs.DiffProcessor._collapse_commits_over
500 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
502 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
501 c.compare_mode = compare
503 c.compare_mode = compare
502
504
503 # diff_limit keeps the old behavior: when it is hit, the whole diff
505 # diff_limit keeps the old behavior: when it is hit, the whole diff
504 # is cut off; file_limit merely hides the
506 # is cut off; file_limit merely hides the
505 # big files from the front-end
507 # big files from the front-end
506 diff_limit = c.visual.cut_off_limit_diff
508 diff_limit = c.visual.cut_off_limit_diff
507 file_limit = c.visual.cut_off_limit_file
509 file_limit = c.visual.cut_off_limit_file
508
510
509 c.missing_commits = False
511 c.missing_commits = False
510 if (c.missing_requirements
512 if (c.missing_requirements
511 or isinstance(source_commit, EmptyCommit)
513 or isinstance(source_commit, EmptyCommit)
512 or source_commit == target_commit):
514 or source_commit == target_commit):
513
515
514 c.missing_commits = True
516 c.missing_commits = True
515 else:
517 else:
516 c.inline_comments = display_inline_comments
518 c.inline_comments = display_inline_comments
517
519
518 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
520 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
519 if not force_recache and has_proper_diff_cache:
521 if not force_recache and has_proper_diff_cache:
520 c.diffset = cached_diff['diff']
522 c.diffset = cached_diff['diff']
521 (ancestor_commit, commit_cache, missing_requirements,
523 (ancestor_commit, commit_cache, missing_requirements,
522 source_commit, target_commit) = cached_diff['commits']
524 source_commit, target_commit) = cached_diff['commits']
523 else:
525 else:
524 c.diffset = self._get_diffset(
526 c.diffset = self._get_diffset(
525 c.source_repo.repo_name, commits_source_repo,
527 c.source_repo.repo_name, commits_source_repo,
526 source_ref_id, target_ref_id,
528 source_ref_id, target_ref_id,
527 target_commit, source_commit,
529 target_commit, source_commit,
528 diff_limit, file_limit, c.fulldiff)
530 diff_limit, file_limit, c.fulldiff)
529
531
530 # save cached diff
532 # save cached diff
531 if caching_enabled:
533 if caching_enabled:
532 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
534 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
533
535
534 c.limited_diff = c.diffset.limited_diff
536 c.limited_diff = c.diffset.limited_diff
535
537
536 # calculate removed files that are bound to comments
538 # calculate removed files that are bound to comments
537 comment_deleted_files = [
539 comment_deleted_files = [
538 fname for fname in display_inline_comments
540 fname for fname in display_inline_comments
539 if fname not in c.diffset.file_stats]
541 if fname not in c.diffset.file_stats]
540
542
541 c.deleted_files_comments = collections.defaultdict(dict)
543 c.deleted_files_comments = collections.defaultdict(dict)
542 for fname, per_line_comments in display_inline_comments.items():
544 for fname, per_line_comments in display_inline_comments.items():
543 if fname in comment_deleted_files:
545 if fname in comment_deleted_files:
544 c.deleted_files_comments[fname]['stats'] = 0
546 c.deleted_files_comments[fname]['stats'] = 0
545 c.deleted_files_comments[fname]['comments'] = list()
547 c.deleted_files_comments[fname]['comments'] = list()
546 for lno, comments in per_line_comments.items():
548 for lno, comments in per_line_comments.items():
547 c.deleted_files_comments[fname]['comments'].extend(
549 c.deleted_files_comments[fname]['comments'].extend(
548 comments)
550 comments)
549
551
550 # this is a hack to display links properly: when creating a PR, the
552 # this is a hack to display links properly: when creating a PR, the
551 # compare view and others use a different notation, and
553 # compare view and others use a different notation, and
552 # compare_commits.mako renders links based on the target_repo.
554 # compare_commits.mako renders links based on the target_repo.
553 # We need to swap it here so links are generated properly on the HTML side
555 # We need to swap it here so links are generated properly on the HTML side
554 c.target_repo = c.source_repo
556 c.target_repo = c.source_repo
555
557
556 c.commit_statuses = ChangesetStatus.STATUSES
558 c.commit_statuses = ChangesetStatus.STATUSES
557
559
558 c.show_version_changes = not pr_closed
560 c.show_version_changes = not pr_closed
559 if c.show_version_changes:
561 if c.show_version_changes:
560 cur_obj = pull_request_at_ver
562 cur_obj = pull_request_at_ver
561 prev_obj = prev_pull_request_at_ver
563 prev_obj = prev_pull_request_at_ver
562
564
563 old_commit_ids = prev_obj.revisions
565 old_commit_ids = prev_obj.revisions
564 new_commit_ids = cur_obj.revisions
566 new_commit_ids = cur_obj.revisions
565 commit_changes = PullRequestModel()._calculate_commit_id_changes(
567 commit_changes = PullRequestModel()._calculate_commit_id_changes(
566 old_commit_ids, new_commit_ids)
568 old_commit_ids, new_commit_ids)
567 c.commit_changes_summary = commit_changes
569 c.commit_changes_summary = commit_changes
568
570
569 # calculate the diff for commits between versions
571 # calculate the diff for commits between versions
570 c.commit_changes = []
572 c.commit_changes = []
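# 'mark' tags every commit id with a change-type flag ('a' added, 'r' removed,
# 'c' common); zipping against an empty iterable pairs each id with the fillvalue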
571 mark = lambda cs, fw: list(
573 mark = lambda cs, fw: list(
572 h.itertools.izip_longest([], cs, fillvalue=fw))
574 h.itertools.izip_longest([], cs, fillvalue=fw))
573 for c_type, raw_id in mark(commit_changes.added, 'a') \
575 for c_type, raw_id in mark(commit_changes.added, 'a') \
574 + mark(commit_changes.removed, 'r') \
576 + mark(commit_changes.removed, 'r') \
575 + mark(commit_changes.common, 'c'):
577 + mark(commit_changes.common, 'c'):
576
578
577 if raw_id in commit_cache:
579 if raw_id in commit_cache:
578 commit = commit_cache[raw_id]
580 commit = commit_cache[raw_id]
579 else:
581 else:
580 try:
582 try:
581 commit = commits_source_repo.get_commit(raw_id)
583 commit = commits_source_repo.get_commit(raw_id)
582 except CommitDoesNotExistError:
584 except CommitDoesNotExistError:
583 # if fetching the commit fails, still use a "dummy" commit
585 # if fetching the commit fails, still use a "dummy" commit
584 # so it can be displayed in the commit diff
586 # so it can be displayed in the commit diff
585 commit = h.AttributeDict(
587 commit = h.AttributeDict(
586 {'raw_id': raw_id,
588 {'raw_id': raw_id,
587 'message': 'EMPTY or MISSING COMMIT'})
589 'message': 'EMPTY or MISSING COMMIT'})
588 c.commit_changes.append([c_type, commit])
590 c.commit_changes.append([c_type, commit])
589
591
590 # current user review statuses for each version
592 # current user review statuses for each version
591 c.review_versions = {}
593 c.review_versions = {}
592 if self._rhodecode_user.user_id in allowed_reviewers:
594 if self._rhodecode_user.user_id in allowed_reviewers:
593 for co in general_comments:
595 for co in general_comments:
594 if co.author.user_id == self._rhodecode_user.user_id:
596 if co.author.user_id == self._rhodecode_user.user_id:
595 status = co.status_change
597 status = co.status_change
596 if status:
598 if status:
597 _ver_pr = status[0].comment.pull_request_version_id
599 _ver_pr = status[0].comment.pull_request_version_id
598 c.review_versions[_ver_pr] = status[0]
600 c.review_versions[_ver_pr] = status[0]
599
601
600 return self._get_template_context(c)
602 return self._get_template_context(c)
601
603
602 def get_commits(
604 def get_commits(
603 self, commits_source_repo, pull_request_at_ver, source_commit,
605 self, commits_source_repo, pull_request_at_ver, source_commit,
604 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
606 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
605 commit_cache = collections.OrderedDict()
607 commit_cache = collections.OrderedDict()
606 missing_requirements = False
608 missing_requirements = False
607 try:
609 try:
608 pre_load = ["author", "branch", "date", "message"]
610 pre_load = ["author", "branch", "date", "message"]
609 show_revs = pull_request_at_ver.revisions
611 show_revs = pull_request_at_ver.revisions
610 for rev in show_revs:
612 for rev in show_revs:
611 comm = commits_source_repo.get_commit(
613 comm = commits_source_repo.get_commit(
612 commit_id=rev, pre_load=pre_load)
614 commit_id=rev, pre_load=pre_load)
613 commit_cache[comm.raw_id] = comm
615 commit_cache[comm.raw_id] = comm
614
616
615 # Order matters here: we first need to get the target, and then
617 # Order matters here: we first need to get the target, and then
616 # the source
618 # the source
617 target_commit = commits_source_repo.get_commit(
619 target_commit = commits_source_repo.get_commit(
618 commit_id=safe_str(target_ref_id))
620 commit_id=safe_str(target_ref_id))
619
621
620 source_commit = commits_source_repo.get_commit(
622 source_commit = commits_source_repo.get_commit(
621 commit_id=safe_str(source_ref_id))
623 commit_id=safe_str(source_ref_id))
622 except CommitDoesNotExistError:
624 except CommitDoesNotExistError:
623 log.warning(
625 log.warning(
624 'Failed to get commit from `{}` repo'.format(
626 'Failed to get commit from `{}` repo'.format(
625 commits_source_repo), exc_info=True)
627 commits_source_repo), exc_info=True)
626 except RepositoryRequirementError:
628 except RepositoryRequirementError:
627 log.warning(
629 log.warning(
628 'Failed to get all required data from repo', exc_info=True)
630 'Failed to get all required data from repo', exc_info=True)
629 missing_requirements = True
631 missing_requirements = True
630 ancestor_commit = None
632 ancestor_commit = None
631 try:
633 try:
632 ancestor_id = source_scm.get_common_ancestor(
634 ancestor_id = source_scm.get_common_ancestor(
633 source_commit.raw_id, target_commit.raw_id, target_scm)
635 source_commit.raw_id, target_commit.raw_id, target_scm)
634 ancestor_commit = source_scm.get_commit(ancestor_id)
636 ancestor_commit = source_scm.get_commit(ancestor_id)
635 except Exception:
637 except Exception:
636 ancestor_commit = None
638 ancestor_commit = None
637 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
639 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
638
640
639 def assure_not_empty_repo(self):
641 def assure_not_empty_repo(self):
640 _ = self.request.translate
642 _ = self.request.translate
641
643
642 try:
644 try:
643 self.db_repo.scm_instance().get_commit()
645 self.db_repo.scm_instance().get_commit()
644 except EmptyRepositoryError:
646 except EmptyRepositoryError:
645 h.flash(h.literal(_('There are no commits yet')),
647 h.flash(h.literal(_('There are no commits yet')),
646 category='warning')
648 category='warning')
647 raise HTTPFound(
649 raise HTTPFound(
648 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
650 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
649
651
650 @LoginRequired()
652 @LoginRequired()
651 @NotAnonymous()
653 @NotAnonymous()
652 @HasRepoPermissionAnyDecorator(
654 @HasRepoPermissionAnyDecorator(
653 'repository.read', 'repository.write', 'repository.admin')
655 'repository.read', 'repository.write', 'repository.admin')
654 @view_config(
656 @view_config(
655 route_name='pullrequest_new', request_method='GET',
657 route_name='pullrequest_new', request_method='GET',
656 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
658 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
657 def pull_request_new(self):
659 def pull_request_new(self):
658 _ = self.request.translate
660 _ = self.request.translate
659 c = self.load_default_context()
661 c = self.load_default_context()
660
662
661 self.assure_not_empty_repo()
663 self.assure_not_empty_repo()
662 source_repo = self.db_repo
664 source_repo = self.db_repo
663
665
664 commit_id = self.request.GET.get('commit')
666 commit_id = self.request.GET.get('commit')
665 branch_ref = self.request.GET.get('branch')
667 branch_ref = self.request.GET.get('branch')
666 bookmark_ref = self.request.GET.get('bookmark')
668 bookmark_ref = self.request.GET.get('bookmark')
667
669
668 try:
670 try:
669 source_repo_data = PullRequestModel().generate_repo_data(
671 source_repo_data = PullRequestModel().generate_repo_data(
670 source_repo, commit_id=commit_id,
672 source_repo, commit_id=commit_id,
671 branch=branch_ref, bookmark=bookmark_ref,
673 branch=branch_ref, bookmark=bookmark_ref,
672 translator=self.request.translate)
674 translator=self.request.translate)
673 except CommitDoesNotExistError as e:
675 except CommitDoesNotExistError as e:
674 log.exception(e)
676 log.exception(e)
675 h.flash(_('Commit does not exist'), 'error')
677 h.flash(_('Commit does not exist'), 'error')
676 raise HTTPFound(
678 raise HTTPFound(
677 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
679 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
678
680
679 default_target_repo = source_repo
681 default_target_repo = source_repo
680
682
681 if source_repo.parent:
683 if source_repo.parent:
682 parent_vcs_obj = source_repo.parent.scm_instance()
684 parent_vcs_obj = source_repo.parent.scm_instance()
683 if parent_vcs_obj and not parent_vcs_obj.is_empty():
685 if parent_vcs_obj and not parent_vcs_obj.is_empty():
684 # change default if we have a parent repo
686 # change default if we have a parent repo
685 default_target_repo = source_repo.parent
687 default_target_repo = source_repo.parent
686
688
687 target_repo_data = PullRequestModel().generate_repo_data(
689 target_repo_data = PullRequestModel().generate_repo_data(
688 default_target_repo, translator=self.request.translate)
690 default_target_repo, translator=self.request.translate)
689
691
690 selected_source_ref = source_repo_data['refs']['selected_ref']
692 selected_source_ref = source_repo_data['refs']['selected_ref']
691 title_source_ref = ''
693 title_source_ref = ''
692 if selected_source_ref:
694 if selected_source_ref:
693 title_source_ref = selected_source_ref.split(':', 2)[1]
695 title_source_ref = selected_source_ref.split(':', 2)[1]
694 c.default_title = PullRequestModel().generate_pullrequest_title(
696 c.default_title = PullRequestModel().generate_pullrequest_title(
695 source=source_repo.repo_name,
697 source=source_repo.repo_name,
696 source_ref=title_source_ref,
698 source_ref=title_source_ref,
697 target=default_target_repo.repo_name
699 target=default_target_repo.repo_name
698 )
700 )
699
701
700 c.default_repo_data = {
702 c.default_repo_data = {
701 'source_repo_name': source_repo.repo_name,
703 'source_repo_name': source_repo.repo_name,
702 'source_refs_json': json.dumps(source_repo_data),
704 'source_refs_json': json.dumps(source_repo_data),
703 'target_repo_name': default_target_repo.repo_name,
705 'target_repo_name': default_target_repo.repo_name,
704 'target_refs_json': json.dumps(target_repo_data),
706 'target_refs_json': json.dumps(target_repo_data),
705 }
707 }
706 c.default_source_ref = selected_source_ref
708 c.default_source_ref = selected_source_ref
707
709
708 return self._get_template_context(c)
710 return self._get_template_context(c)
709
711
710 @LoginRequired()
712 @LoginRequired()
711 @NotAnonymous()
713 @NotAnonymous()
712 @HasRepoPermissionAnyDecorator(
714 @HasRepoPermissionAnyDecorator(
713 'repository.read', 'repository.write', 'repository.admin')
715 'repository.read', 'repository.write', 'repository.admin')
714 @view_config(
716 @view_config(
715 route_name='pullrequest_repo_refs', request_method='GET',
717 route_name='pullrequest_repo_refs', request_method='GET',
716 renderer='json_ext', xhr=True)
718 renderer='json_ext', xhr=True)
717 def pull_request_repo_refs(self):
719 def pull_request_repo_refs(self):
718 self.load_default_context()
720 self.load_default_context()
719 target_repo_name = self.request.matchdict['target_repo_name']
721 target_repo_name = self.request.matchdict['target_repo_name']
720 repo = Repository.get_by_repo_name(target_repo_name)
722 repo = Repository.get_by_repo_name(target_repo_name)
721 if not repo:
723 if not repo:
722 raise HTTPNotFound()
724 raise HTTPNotFound()
723
725
724 target_perm = HasRepoPermissionAny(
726 target_perm = HasRepoPermissionAny(
725 'repository.read', 'repository.write', 'repository.admin')(
727 'repository.read', 'repository.write', 'repository.admin')(
726 target_repo_name)
728 target_repo_name)
727 if not target_perm:
729 if not target_perm:
728 raise HTTPNotFound()
730 raise HTTPNotFound()
729
731
730 return PullRequestModel().generate_repo_data(
732 return PullRequestModel().generate_repo_data(
731 repo, translator=self.request.translate)
733 repo, translator=self.request.translate)
732
734
733 @LoginRequired()
735 @LoginRequired()
734 @NotAnonymous()
736 @NotAnonymous()
735 @HasRepoPermissionAnyDecorator(
737 @HasRepoPermissionAnyDecorator(
736 'repository.read', 'repository.write', 'repository.admin')
738 'repository.read', 'repository.write', 'repository.admin')
737 @view_config(
739 @view_config(
738 route_name='pullrequest_repo_destinations', request_method='GET',
740 route_name='pullrequest_repo_destinations', request_method='GET',
739 renderer='json_ext', xhr=True)
741 renderer='json_ext', xhr=True)
740 def pull_request_repo_destinations(self):
742 def pull_request_repo_destinations(self):
741 _ = self.request.translate
743 _ = self.request.translate
742 filter_query = self.request.GET.get('query')
744 filter_query = self.request.GET.get('query')
743
745
744 query = Repository.query() \
746 query = Repository.query() \
745 .order_by(func.length(Repository.repo_name)) \
747 .order_by(func.length(Repository.repo_name)) \
746 .filter(
748 .filter(
747 or_(Repository.repo_name == self.db_repo.repo_name,
749 or_(Repository.repo_name == self.db_repo.repo_name,
748 Repository.fork_id == self.db_repo.repo_id))
750 Repository.fork_id == self.db_repo.repo_id))
749
751
750 if filter_query:
752 if filter_query:
751 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
753 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
752 query = query.filter(
754 query = query.filter(
753 Repository.repo_name.ilike(ilike_expression))
755 Repository.repo_name.ilike(ilike_expression))
754
756
755 add_parent = False
757 add_parent = False
756 if self.db_repo.parent:
758 if self.db_repo.parent:
757 if filter_query in self.db_repo.parent.repo_name:
759 if filter_query in self.db_repo.parent.repo_name:
758 parent_vcs_obj = self.db_repo.parent.scm_instance()
760 parent_vcs_obj = self.db_repo.parent.scm_instance()
759 if parent_vcs_obj and not parent_vcs_obj.is_empty():
761 if parent_vcs_obj and not parent_vcs_obj.is_empty():
760 add_parent = True
762 add_parent = True
761
763
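# keep the result list at 20 entries, reserving one slot when the parent repo
# is appended below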
762 limit = 20 - 1 if add_parent else 20
764 limit = 20 - 1 if add_parent else 20
763 all_repos = query.limit(limit).all()
765 all_repos = query.limit(limit).all()
764 if add_parent:
766 if add_parent:
765 all_repos += [self.db_repo.parent]
767 all_repos += [self.db_repo.parent]
766
768
767 repos = []
769 repos = []
768 for obj in ScmModel().get_repos(all_repos):
770 for obj in ScmModel().get_repos(all_repos):
769 repos.append({
771 repos.append({
770 'id': obj['name'],
772 'id': obj['name'],
771 'text': obj['name'],
773 'text': obj['name'],
772 'type': 'repo',
774 'type': 'repo',
773 'repo_id': obj['dbrepo']['repo_id'],
775 'repo_id': obj['dbrepo']['repo_id'],
774 'repo_type': obj['dbrepo']['repo_type'],
776 'repo_type': obj['dbrepo']['repo_type'],
775 'private': obj['dbrepo']['private'],
777 'private': obj['dbrepo']['private'],
776
778
777 })
779 })
778
780
779 data = {
781 data = {
780 'more': False,
782 'more': False,
781 'results': [{
783 'results': [{
782 'text': _('Repositories'),
784 'text': _('Repositories'),
783 'children': repos
785 'children': repos
784 }] if repos else []
786 }] if repos else []
785 }
787 }
786 return data
788 return data
787
789
788 @LoginRequired()
790 @LoginRequired()
789 @NotAnonymous()
791 @NotAnonymous()
790 @HasRepoPermissionAnyDecorator(
792 @HasRepoPermissionAnyDecorator(
791 'repository.read', 'repository.write', 'repository.admin')
793 'repository.read', 'repository.write', 'repository.admin')
792 @CSRFRequired()
794 @CSRFRequired()
793 @view_config(
795 @view_config(
794 route_name='pullrequest_create', request_method='POST',
796 route_name='pullrequest_create', request_method='POST',
795 renderer=None)
797 renderer=None)
796 def pull_request_create(self):
798 def pull_request_create(self):
797 _ = self.request.translate
799 _ = self.request.translate
798 self.assure_not_empty_repo()
800 self.assure_not_empty_repo()
799 self.load_default_context()
801 self.load_default_context()
800
802
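# peppercorn reconstructs nested form structures from the flat POST items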
801 controls = peppercorn.parse(self.request.POST.items())
803 controls = peppercorn.parse(self.request.POST.items())
802
804
803 try:
805 try:
804 form = PullRequestForm(
806 form = PullRequestForm(
805 self.request.translate, self.db_repo.repo_id)()
807 self.request.translate, self.db_repo.repo_id)()
806 _form = form.to_python(controls)
808 _form = form.to_python(controls)
807 except formencode.Invalid as errors:
809 except formencode.Invalid as errors:
808 if errors.error_dict.get('revisions'):
810 if errors.error_dict.get('revisions'):
809 msg = 'Revisions: %s' % errors.error_dict['revisions']
811 msg = 'Revisions: %s' % errors.error_dict['revisions']
810 elif errors.error_dict.get('pullrequest_title'):
812 elif errors.error_dict.get('pullrequest_title'):
811 msg = errors.error_dict.get('pullrequest_title')
813 msg = errors.error_dict.get('pullrequest_title')
812 else:
814 else:
813 msg = _('Error creating pull request: {}').format(errors)
815 msg = _('Error creating pull request: {}').format(errors)
814 log.exception(msg)
816 log.exception(msg)
815 h.flash(msg, 'error')
817 h.flash(msg, 'error')
816
818
817 # ideally we would just go back to the form ...
819 # ideally we would just go back to the form ...
818 raise HTTPFound(
820 raise HTTPFound(
819 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
821 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
820
822
821 source_repo = _form['source_repo']
823 source_repo = _form['source_repo']
822 source_ref = _form['source_ref']
824 source_ref = _form['source_ref']
823 target_repo = _form['target_repo']
825 target_repo = _form['target_repo']
824 target_ref = _form['target_ref']
826 target_ref = _form['target_ref']
825 commit_ids = _form['revisions'][::-1]
827 commit_ids = _form['revisions'][::-1]
826
828
827 # find the ancestor for this pr
829 # find the ancestor for this pr
828 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
830 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
829 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
831 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
830
832
831 # re-check permissions here
833 # re-check permissions here
832 # we must have read permissions on the source_repo
834 # we must have read permissions on the source_repo
833
835
834 source_perm = HasRepoPermissionAny(
836 source_perm = HasRepoPermissionAny(
835 'repository.read',
837 'repository.read',
836 'repository.write', 'repository.admin')(source_db_repo.repo_name)
838 'repository.write', 'repository.admin')(source_db_repo.repo_name)
837 if not source_perm:
839 if not source_perm:
838 msg = _('Not enough permissions to source repo `{}`.'.format(
840 msg = _('Not enough permissions to source repo `{}`.'.format(
839 source_db_repo.repo_name))
841 source_db_repo.repo_name))
840 h.flash(msg, category='error')
842 h.flash(msg, category='error')
841 # copy the args back to redirect
843 # copy the args back to redirect
842 org_query = self.request.GET.mixed()
844 org_query = self.request.GET.mixed()
843 raise HTTPFound(
845 raise HTTPFound(
844 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
846 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
845 _query=org_query))
847 _query=org_query))
846
848
847 # we must have read permissions on the target repo as well; later on
849 # we must have read permissions on the target repo as well; later on
848 # we also want to check branch permissions here
850 # we also want to check branch permissions here
849 target_perm = HasRepoPermissionAny(
851 target_perm = HasRepoPermissionAny(
850 'repository.read',
852 'repository.read',
851 'repository.write', 'repository.admin')(target_db_repo.repo_name)
853 'repository.write', 'repository.admin')(target_db_repo.repo_name)
852 if not target_perm:
854 if not target_perm:
853 msg = _('Not enough permissions to target repo `{}`.'.format(
855 msg = _('Not enough permissions to target repo `{}`.'.format(
854 target_db_repo.repo_name))
856 target_db_repo.repo_name))
855 h.flash(msg, category='error')
857 h.flash(msg, category='error')
856 # copy the args back to redirect
858 # copy the args back to redirect
857 org_query = self.request.GET.mixed()
859 org_query = self.request.GET.mixed()
858 raise HTTPFound(
860 raise HTTPFound(
859 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
861 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
860 _query=org_query))
862 _query=org_query))
861
863
862 source_scm = source_db_repo.scm_instance()
864 source_scm = source_db_repo.scm_instance()
863 target_scm = target_db_repo.scm_instance()
865 target_scm = target_db_repo.scm_instance()
864
866
865 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
867 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
866 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
868 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
867
869
868 ancestor = source_scm.get_common_ancestor(
870 ancestor = source_scm.get_common_ancestor(
869 source_commit.raw_id, target_commit.raw_id, target_scm)
871 source_commit.raw_id, target_commit.raw_id, target_scm)
870
872
871 # recalculate target ref based on ancestor
873 # recalculate target ref based on ancestor
872 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
874 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
873 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
875 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
874
876
875 get_default_reviewers_data, validate_default_reviewers = \
877 get_default_reviewers_data, validate_default_reviewers = \
876 PullRequestModel().get_reviewer_functions()
878 PullRequestModel().get_reviewer_functions()
877
879
878 # recalculate the reviewer rules, so we can validate the submitted reviewers
880 # recalculate the reviewer rules, so we can validate the submitted reviewers
879 reviewer_rules = get_default_reviewers_data(
881 reviewer_rules = get_default_reviewers_data(
880 self._rhodecode_db_user, source_db_repo,
882 self._rhodecode_db_user, source_db_repo,
881 source_commit, target_db_repo, target_commit)
883 source_commit, target_db_repo, target_commit)
882
884
883 given_reviewers = _form['review_members']
885 given_reviewers = _form['review_members']
884 reviewers = validate_default_reviewers(
886 reviewers = validate_default_reviewers(
885 given_reviewers, reviewer_rules)
887 given_reviewers, reviewer_rules)
886
888
887 pullrequest_title = _form['pullrequest_title']
889 pullrequest_title = _form['pullrequest_title']
888 title_source_ref = source_ref.split(':', 2)[1]
890 title_source_ref = source_ref.split(':', 2)[1]
889 if not pullrequest_title:
891 if not pullrequest_title:
890 pullrequest_title = PullRequestModel().generate_pullrequest_title(
892 pullrequest_title = PullRequestModel().generate_pullrequest_title(
891 source=source_repo,
893 source=source_repo,
892 source_ref=title_source_ref,
894 source_ref=title_source_ref,
893 target=target_repo
895 target=target_repo
894 )
896 )
895
897
896 description = _form['pullrequest_desc']
898 description = _form['pullrequest_desc']
899 description_renderer = _form['description_renderer']
897
900
898 try:
901 try:
899 pull_request = PullRequestModel().create(
902 pull_request = PullRequestModel().create(
900 created_by=self._rhodecode_user.user_id,
903 created_by=self._rhodecode_user.user_id,
901 source_repo=source_repo,
904 source_repo=source_repo,
902 source_ref=source_ref,
905 source_ref=source_ref,
903 target_repo=target_repo,
906 target_repo=target_repo,
904 target_ref=target_ref,
907 target_ref=target_ref,
905 revisions=commit_ids,
908 revisions=commit_ids,
906 reviewers=reviewers,
909 reviewers=reviewers,
907 title=pullrequest_title,
910 title=pullrequest_title,
908 description=description,
911 description=description,
912 description_renderer=description_renderer,
909 reviewer_data=reviewer_rules,
913 reviewer_data=reviewer_rules,
910 auth_user=self._rhodecode_user
914 auth_user=self._rhodecode_user
911 )
915 )
912 Session().commit()
916 Session().commit()
913
917
914 h.flash(_('Successfully opened new pull request'),
918 h.flash(_('Successfully opened new pull request'),
915 category='success')
919 category='success')
916 except Exception:
920 except Exception:
917 msg = _('Error occurred during creation of this pull request.')
921 msg = _('Error occurred during creation of this pull request.')
918 log.exception(msg)
922 log.exception(msg)
919 h.flash(msg, category='error')
923 h.flash(msg, category='error')
920
924
921 # copy the args back to redirect
925 # copy the args back to redirect
922 org_query = self.request.GET.mixed()
926 org_query = self.request.GET.mixed()
923 raise HTTPFound(
927 raise HTTPFound(
924 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
928 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
925 _query=org_query))
929 _query=org_query))
926
930
927 raise HTTPFound(
931 raise HTTPFound(
928 h.route_path('pullrequest_show', repo_name=target_repo,
932 h.route_path('pullrequest_show', repo_name=target_repo,
929 pull_request_id=pull_request.pull_request_id))
933 pull_request_id=pull_request.pull_request_id))
930
934
931 @LoginRequired()
935 @LoginRequired()
932 @NotAnonymous()
936 @NotAnonymous()
933 @HasRepoPermissionAnyDecorator(
937 @HasRepoPermissionAnyDecorator(
934 'repository.read', 'repository.write', 'repository.admin')
938 'repository.read', 'repository.write', 'repository.admin')
935 @CSRFRequired()
939 @CSRFRequired()
936 @view_config(
940 @view_config(
937 route_name='pullrequest_update', request_method='POST',
941 route_name='pullrequest_update', request_method='POST',
938 renderer='json_ext')
942 renderer='json_ext')
939 def pull_request_update(self):
943 def pull_request_update(self):
940 pull_request = PullRequest.get_or_404(
944 pull_request = PullRequest.get_or_404(
941 self.request.matchdict['pull_request_id'])
945 self.request.matchdict['pull_request_id'])
942 _ = self.request.translate
946 _ = self.request.translate
943
947
944 self.load_default_context()
948 self.load_default_context()
945
949
946 if pull_request.is_closed():
950 if pull_request.is_closed():
947 log.debug('update: forbidden because pull request is closed')
951 log.debug('update: forbidden because pull request is closed')
948 msg = _(u'Cannot update closed pull requests.')
952 msg = _(u'Cannot update closed pull requests.')
949 h.flash(msg, category='error')
953 h.flash(msg, category='error')
950 return True
954 return True
951
955
952 # only owner or admin can update it
956 # only owner or admin can update it
953 allowed_to_update = PullRequestModel().check_user_update(
957 allowed_to_update = PullRequestModel().check_user_update(
954 pull_request, self._rhodecode_user)
958 pull_request, self._rhodecode_user)
955 if allowed_to_update:
959 if allowed_to_update:
956 controls = peppercorn.parse(self.request.POST.items())
960 controls = peppercorn.parse(self.request.POST.items())
957
961
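# this single update endpoint dispatches on the submitted fields: reviewer
# changes, a commit update, or a title/description edit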
958 if 'review_members' in controls:
962 if 'review_members' in controls:
959 self._update_reviewers(
963 self._update_reviewers(
960 pull_request, controls['review_members'],
964 pull_request, controls['review_members'],
961 pull_request.reviewer_data)
965 pull_request.reviewer_data)
962 elif str2bool(self.request.POST.get('update_commits', 'false')):
966 elif str2bool(self.request.POST.get('update_commits', 'false')):
963 self._update_commits(pull_request)
967 self._update_commits(pull_request)
964 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
968 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
965 self._edit_pull_request(pull_request)
969 self._edit_pull_request(pull_request)
966 else:
970 else:
967 raise HTTPBadRequest()
971 raise HTTPBadRequest()
968 return True
972 return True
969 raise HTTPForbidden()
973 raise HTTPForbidden()
970
974
971 def _edit_pull_request(self, pull_request):
975 def _edit_pull_request(self, pull_request):
972 _ = self.request.translate
976 _ = self.request.translate
977
973 try:
978 try:
974 PullRequestModel().edit(
979 PullRequestModel().edit(
975 pull_request, self.request.POST.get('title'),
980 pull_request,
976 self.request.POST.get('description'), self._rhodecode_user)
981 self.request.POST.get('title'),
982 self.request.POST.get('description'),
983 self.request.POST.get('description_renderer'),
984 self._rhodecode_user)
977 except ValueError:
985 except ValueError:
978 msg = _(u'Cannot update closed pull requests.')
986 msg = _(u'Cannot update closed pull requests.')
979 h.flash(msg, category='error')
987 h.flash(msg, category='error')
980 return
988 return
981 else:
989 else:
982 Session().commit()
990 Session().commit()
983
991
984 msg = _(u'Pull request title & description updated.')
992 msg = _(u'Pull request title & description updated.')
985 h.flash(msg, category='success')
993 h.flash(msg, category='success')
986 return
994 return
987
995
988 def _update_commits(self, pull_request):
996 def _update_commits(self, pull_request):
989 _ = self.request.translate
997 _ = self.request.translate
990 resp = PullRequestModel().update_commits(pull_request)
998 resp = PullRequestModel().update_commits(pull_request)
991
999
992 if resp.executed:
1000 if resp.executed:
993
1001
994 if resp.target_changed and resp.source_changed:
1002 if resp.target_changed and resp.source_changed:
995 changed = 'target and source repositories'
1003 changed = 'target and source repositories'
996 elif resp.target_changed and not resp.source_changed:
1004 elif resp.target_changed and not resp.source_changed:
997 changed = 'target repository'
1005 changed = 'target repository'
998 elif not resp.target_changed and resp.source_changed:
1006 elif not resp.target_changed and resp.source_changed:
999 changed = 'source repository'
1007 changed = 'source repository'
1000 else:
1008 else:
1001 changed = 'nothing'
1009 changed = 'nothing'
1002
1010
1003 msg = _(
1011 msg = _(
1004 u'Pull request updated to "{source_commit_id}" with '
1012 u'Pull request updated to "{source_commit_id}" with '
1005 u'{count_added} added, {count_removed} removed commits. '
1013 u'{count_added} added, {count_removed} removed commits. '
1006 u'Source of changes: {change_source}')
1014 u'Source of changes: {change_source}')
1007 msg = msg.format(
1015 msg = msg.format(
1008 source_commit_id=pull_request.source_ref_parts.commit_id,
1016 source_commit_id=pull_request.source_ref_parts.commit_id,
1009 count_added=len(resp.changes.added),
1017 count_added=len(resp.changes.added),
1010 count_removed=len(resp.changes.removed),
1018 count_removed=len(resp.changes.removed),
1011 change_source=changed)
1019 change_source=changed)
1012 h.flash(msg, category='success')
1020 h.flash(msg, category='success')
1013
1021
1014 channel = '/repo${}$/pr/{}'.format(
1022 channel = '/repo${}$/pr/{}'.format(
1015 pull_request.target_repo.repo_name,
1023 pull_request.target_repo.repo_name,
1016 pull_request.pull_request_id)
1024 pull_request.pull_request_id)
1017 message = msg + (
1025 message = msg + (
1018 ' - <a onclick="window.location.reload()">'
1026 ' - <a onclick="window.location.reload()">'
1019 '<strong>{}</strong></a>'.format(_('Reload page')))
1027 '<strong>{}</strong></a>'.format(_('Reload page')))
1020 channelstream.post_message(
1028 channelstream.post_message(
1021 channel, message, self._rhodecode_user.username,
1029 channel, message, self._rhodecode_user.username,
1022 registry=self.request.registry)
1030 registry=self.request.registry)
1023 else:
1031 else:
1024 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1032 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1025 warning_reasons = [
1033 warning_reasons = [
1026 UpdateFailureReason.NO_CHANGE,
1034 UpdateFailureReason.NO_CHANGE,
1027 UpdateFailureReason.WRONG_REF_TYPE,
1035 UpdateFailureReason.WRONG_REF_TYPE,
1028 ]
1036 ]
1029 category = 'warning' if resp.reason in warning_reasons else 'error'
1037 category = 'warning' if resp.reason in warning_reasons else 'error'
1030 h.flash(msg, category=category)
1038 h.flash(msg, category=category)
1031
1039
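Live-update notifications are pushed over channelstream on a channel derived from the target repository and pull request id. For illustration (repository name and id below are made up):

# Illustrative only -- same channel format as built in _update_commits() above
channel = '/repo${}$/pr/{}'.format('ce/rhodecode-ce', 7)
# channel == '/repo$ce/rhodecode-ce$/pr/7'
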
1032 @LoginRequired()
1040 @LoginRequired()
1033 @NotAnonymous()
1041 @NotAnonymous()
1034 @HasRepoPermissionAnyDecorator(
1042 @HasRepoPermissionAnyDecorator(
1035 'repository.read', 'repository.write', 'repository.admin')
1043 'repository.read', 'repository.write', 'repository.admin')
1036 @CSRFRequired()
1044 @CSRFRequired()
1037 @view_config(
1045 @view_config(
1038 route_name='pullrequest_merge', request_method='POST',
1046 route_name='pullrequest_merge', request_method='POST',
1039 renderer='json_ext')
1047 renderer='json_ext')
1040 def pull_request_merge(self):
1048 def pull_request_merge(self):
1041 """
1049 """
1042 Merge will perform a server-side merge of the specified
1050 Merge will perform a server-side merge of the specified
1043 pull request, if the pull request is approved and mergeable.
1051 pull request, if the pull request is approved and mergeable.
1044 After successful merging, the pull request is automatically
1052 After successful merging, the pull request is automatically
1045 closed, with a relevant comment.
1053 closed, with a relevant comment.
1046 """
1054 """
1047 pull_request = PullRequest.get_or_404(
1055 pull_request = PullRequest.get_or_404(
1048 self.request.matchdict['pull_request_id'])
1056 self.request.matchdict['pull_request_id'])
1049
1057
1050 self.load_default_context()
1058 self.load_default_context()
1051 check = MergeCheck.validate(pull_request, self._rhodecode_db_user,
1059 check = MergeCheck.validate(pull_request, self._rhodecode_db_user,
1052 translator=self.request.translate)
1060 translator=self.request.translate)
1053 merge_possible = not check.failed
1061 merge_possible = not check.failed
1054
1062
1055 for err_type, error_msg in check.errors:
1063 for err_type, error_msg in check.errors:
1056 h.flash(error_msg, category=err_type)
1064 h.flash(error_msg, category=err_type)
1057
1065
1058 if merge_possible:
1066 if merge_possible:
1059 log.debug("Pre-conditions checked, trying to merge.")
1067 log.debug("Pre-conditions checked, trying to merge.")
1060 extras = vcs_operation_context(
1068 extras = vcs_operation_context(
1061 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1069 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1062 username=self._rhodecode_db_user.username, action='push',
1070 username=self._rhodecode_db_user.username, action='push',
1063 scm=pull_request.target_repo.repo_type)
1071 scm=pull_request.target_repo.repo_type)
1064 self._merge_pull_request(
1072 self._merge_pull_request(
1065 pull_request, self._rhodecode_db_user, extras)
1073 pull_request, self._rhodecode_db_user, extras)
1066 else:
1074 else:
1067 log.debug("Pre-conditions failed, NOT merging.")
1075 log.debug("Pre-conditions failed, NOT merging.")
1068
1076
1069 raise HTTPFound(
1077 raise HTTPFound(
1070 h.route_path('pullrequest_show',
1078 h.route_path('pullrequest_show',
1071 repo_name=pull_request.target_repo.repo_name,
1079 repo_name=pull_request.target_repo.repo_name,
1072 pull_request_id=pull_request.pull_request_id))
1080 pull_request_id=pull_request.pull_request_id))
1073
1081
1074 def _merge_pull_request(self, pull_request, user, extras):
1082 def _merge_pull_request(self, pull_request, user, extras):
1075 _ = self.request.translate
1083 _ = self.request.translate
1076 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1084 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1077
1085
1078 if merge_resp.executed:
1086 if merge_resp.executed:
1079 log.debug("The merge was successful, closing the pull request.")
1087 log.debug("The merge was successful, closing the pull request.")
1080 PullRequestModel().close_pull_request(
1088 PullRequestModel().close_pull_request(
1081 pull_request.pull_request_id, user)
1089 pull_request.pull_request_id, user)
1082 Session().commit()
1090 Session().commit()
1083 msg = _('Pull request was successfully merged and closed.')
1091 msg = _('Pull request was successfully merged and closed.')
1084 h.flash(msg, category='success')
1092 h.flash(msg, category='success')
1085 else:
1093 else:
1086 log.debug(
1094 log.debug(
1087 "The merge was not successful. Merge response: %s",
1095 "The merge was not successful. Merge response: %s",
1088 merge_resp)
1096 merge_resp)
1089 msg = PullRequestModel().merge_status_message(
1097 msg = PullRequestModel().merge_status_message(
1090 merge_resp.failure_reason)
1098 merge_resp.failure_reason)
1091 h.flash(msg, category='error')
1099 h.flash(msg, category='error')
1092
1100
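Stripped of flash messages and logging, the merge path above is a validate / merge / close sequence. A condensed sketch using only the calls already shown in this view (error handling omitted):

# Condensed sketch of the server-side merge flow shown above
check = MergeCheck.validate(pull_request, user, translator=translator)
if not check.failed:
    merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
    if merge_resp.executed:
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
        Session().commit()
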
1093 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1101 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1094 _ = self.request.translate
1102 _ = self.request.translate
1095 get_default_reviewers_data, validate_default_reviewers = \
1103 get_default_reviewers_data, validate_default_reviewers = \
1096 PullRequestModel().get_reviewer_functions()
1104 PullRequestModel().get_reviewer_functions()
1097
1105
1098 try:
1106 try:
1099 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1107 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1100 except ValueError as e:
1108 except ValueError as e:
1101 log.error('Reviewers Validation: {}'.format(e))
1109 log.error('Reviewers Validation: {}'.format(e))
1102 h.flash(e, category='error')
1110 h.flash(e, category='error')
1103 return
1111 return
1104
1112
1105 PullRequestModel().update_reviewers(
1113 PullRequestModel().update_reviewers(
1106 pull_request, reviewers, self._rhodecode_user)
1114 pull_request, reviewers, self._rhodecode_user)
1107 h.flash(_('Pull request reviewers updated.'), category='success')
1115 h.flash(_('Pull request reviewers updated.'), category='success')
1108 Session().commit()
1116 Session().commit()
1109
1117
1110 @LoginRequired()
1118 @LoginRequired()
1111 @NotAnonymous()
1119 @NotAnonymous()
1112 @HasRepoPermissionAnyDecorator(
1120 @HasRepoPermissionAnyDecorator(
1113 'repository.read', 'repository.write', 'repository.admin')
1121 'repository.read', 'repository.write', 'repository.admin')
1114 @CSRFRequired()
1122 @CSRFRequired()
1115 @view_config(
1123 @view_config(
1116 route_name='pullrequest_delete', request_method='POST',
1124 route_name='pullrequest_delete', request_method='POST',
1117 renderer='json_ext')
1125 renderer='json_ext')
1118 def pull_request_delete(self):
1126 def pull_request_delete(self):
1119 _ = self.request.translate
1127 _ = self.request.translate
1120
1128
1121 pull_request = PullRequest.get_or_404(
1129 pull_request = PullRequest.get_or_404(
1122 self.request.matchdict['pull_request_id'])
1130 self.request.matchdict['pull_request_id'])
1123 self.load_default_context()
1131 self.load_default_context()
1124
1132
1125 pr_closed = pull_request.is_closed()
1133 pr_closed = pull_request.is_closed()
1126 allowed_to_delete = PullRequestModel().check_user_delete(
1134 allowed_to_delete = PullRequestModel().check_user_delete(
1127 pull_request, self._rhodecode_user) and not pr_closed
1135 pull_request, self._rhodecode_user) and not pr_closed
1128
1136
1129 # only owner can delete it !
1137 # only owner can delete it !
1130 if allowed_to_delete:
1138 if allowed_to_delete:
1131 PullRequestModel().delete(pull_request, self._rhodecode_user)
1139 PullRequestModel().delete(pull_request, self._rhodecode_user)
1132 Session().commit()
1140 Session().commit()
1133 h.flash(_('Successfully deleted pull request'),
1141 h.flash(_('Successfully deleted pull request'),
1134 category='success')
1142 category='success')
1135 raise HTTPFound(h.route_path('pullrequest_show_all',
1143 raise HTTPFound(h.route_path('pullrequest_show_all',
1136 repo_name=self.db_repo_name))
1144 repo_name=self.db_repo_name))
1137
1145
1138 log.warning('user %s tried to delete pull request without access',
1146 log.warning('user %s tried to delete pull request without access',
1139 self._rhodecode_user)
1147 self._rhodecode_user)
1140 raise HTTPNotFound()
1148 raise HTTPNotFound()
1141
1149
1142 @LoginRequired()
1150 @LoginRequired()
1143 @NotAnonymous()
1151 @NotAnonymous()
1144 @HasRepoPermissionAnyDecorator(
1152 @HasRepoPermissionAnyDecorator(
1145 'repository.read', 'repository.write', 'repository.admin')
1153 'repository.read', 'repository.write', 'repository.admin')
1146 @CSRFRequired()
1154 @CSRFRequired()
1147 @view_config(
1155 @view_config(
1148 route_name='pullrequest_comment_create', request_method='POST',
1156 route_name='pullrequest_comment_create', request_method='POST',
1149 renderer='json_ext')
1157 renderer='json_ext')
1150 def pull_request_comment_create(self):
1158 def pull_request_comment_create(self):
1151 _ = self.request.translate
1159 _ = self.request.translate
1152
1160
1153 pull_request = PullRequest.get_or_404(
1161 pull_request = PullRequest.get_or_404(
1154 self.request.matchdict['pull_request_id'])
1162 self.request.matchdict['pull_request_id'])
1155 pull_request_id = pull_request.pull_request_id
1163 pull_request_id = pull_request.pull_request_id
1156
1164
1157 if pull_request.is_closed():
1165 if pull_request.is_closed():
1158 log.debug('comment: forbidden because pull request is closed')
1166 log.debug('comment: forbidden because pull request is closed')
1159 raise HTTPForbidden()
1167 raise HTTPForbidden()
1160
1168
1161 allowed_to_comment = PullRequestModel().check_user_comment(
1169 allowed_to_comment = PullRequestModel().check_user_comment(
1162 pull_request, self._rhodecode_user)
1170 pull_request, self._rhodecode_user)
1163 if not allowed_to_comment:
1171 if not allowed_to_comment:
1164 log.debug(
1172 log.debug(
1165 'comment: forbidden because pull request is from forbidden repo')
1173 'comment: forbidden because pull request is from forbidden repo')
1166 raise HTTPForbidden()
1174 raise HTTPForbidden()
1167
1175
1168 c = self.load_default_context()
1176 c = self.load_default_context()
1169
1177
1170 status = self.request.POST.get('changeset_status', None)
1178 status = self.request.POST.get('changeset_status', None)
1171 text = self.request.POST.get('text')
1179 text = self.request.POST.get('text')
1172 comment_type = self.request.POST.get('comment_type')
1180 comment_type = self.request.POST.get('comment_type')
1173 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1181 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1174 close_pull_request = self.request.POST.get('close_pull_request')
1182 close_pull_request = self.request.POST.get('close_pull_request')
1175
1183
1176 # The logic here works as follows: if we submit a close-PR comment,
1184 # The logic here works as follows: if we submit a close-PR comment,
1177 # use the `close_pull_request_with_comment` function;
1185 # use the `close_pull_request_with_comment` function;
1178 # otherwise handle regular comment logic
1186 # otherwise handle regular comment logic
1179
1187
1180 if close_pull_request:
1188 if close_pull_request:
1181 # only owner or admin or person with write permissions
1189 # only owner or admin or person with write permissions
1182 allowed_to_close = PullRequestModel().check_user_update(
1190 allowed_to_close = PullRequestModel().check_user_update(
1183 pull_request, self._rhodecode_user)
1191 pull_request, self._rhodecode_user)
1184 if not allowed_to_close:
1192 if not allowed_to_close:
1185 log.debug('comment: forbidden because not allowed to close '
1193 log.debug('comment: forbidden because not allowed to close '
1186 'pull request %s', pull_request_id)
1194 'pull request %s', pull_request_id)
1187 raise HTTPForbidden()
1195 raise HTTPForbidden()
1188 comment, status = PullRequestModel().close_pull_request_with_comment(
1196 comment, status = PullRequestModel().close_pull_request_with_comment(
1189 pull_request, self._rhodecode_user, self.db_repo, message=text)
1197 pull_request, self._rhodecode_user, self.db_repo, message=text)
1190 Session().flush()
1198 Session().flush()
1191 events.trigger(
1199 events.trigger(
1192 events.PullRequestCommentEvent(pull_request, comment))
1200 events.PullRequestCommentEvent(pull_request, comment))
1193
1201
1194 else:
1202 else:
1195 # regular comment case, could be inline, or one with status.
1203 # regular comment case, could be inline, or one with status.
1196 # for that one we check also permissions
1204 # for that one we check also permissions
1197
1205
1198 allowed_to_change_status = PullRequestModel().check_user_change_status(
1206 allowed_to_change_status = PullRequestModel().check_user_change_status(
1199 pull_request, self._rhodecode_user)
1207 pull_request, self._rhodecode_user)
1200
1208
1201 if status and allowed_to_change_status:
1209 if status and allowed_to_change_status:
1202 message = (_('Status change %(transition_icon)s %(status)s')
1210 message = (_('Status change %(transition_icon)s %(status)s')
1203 % {'transition_icon': '>',
1211 % {'transition_icon': '>',
1204 'status': ChangesetStatus.get_status_lbl(status)})
1212 'status': ChangesetStatus.get_status_lbl(status)})
1205 text = text or message
1213 text = text or message
1206
1214
1207 comment = CommentsModel().create(
1215 comment = CommentsModel().create(
1208 text=text,
1216 text=text,
1209 repo=self.db_repo.repo_id,
1217 repo=self.db_repo.repo_id,
1210 user=self._rhodecode_user.user_id,
1218 user=self._rhodecode_user.user_id,
1211 pull_request=pull_request,
1219 pull_request=pull_request,
1212 f_path=self.request.POST.get('f_path'),
1220 f_path=self.request.POST.get('f_path'),
1213 line_no=self.request.POST.get('line'),
1221 line_no=self.request.POST.get('line'),
1214 status_change=(ChangesetStatus.get_status_lbl(status)
1222 status_change=(ChangesetStatus.get_status_lbl(status)
1215 if status and allowed_to_change_status else None),
1223 if status and allowed_to_change_status else None),
1216 status_change_type=(status
1224 status_change_type=(status
1217 if status and allowed_to_change_status else None),
1225 if status and allowed_to_change_status else None),
1218 comment_type=comment_type,
1226 comment_type=comment_type,
1219 resolves_comment_id=resolves_comment_id,
1227 resolves_comment_id=resolves_comment_id,
1220 auth_user=self._rhodecode_user
1228 auth_user=self._rhodecode_user
1221 )
1229 )
1222
1230
1223 if allowed_to_change_status:
1231 if allowed_to_change_status:
1224 # calculate old status before we change it
1232 # calculate old status before we change it
1225 old_calculated_status = pull_request.calculated_review_status()
1233 old_calculated_status = pull_request.calculated_review_status()
1226
1234
1227 # get status if set !
1235 # get status if set !
1228 if status:
1236 if status:
1229 ChangesetStatusModel().set_status(
1237 ChangesetStatusModel().set_status(
1230 self.db_repo.repo_id,
1238 self.db_repo.repo_id,
1231 status,
1239 status,
1232 self._rhodecode_user.user_id,
1240 self._rhodecode_user.user_id,
1233 comment,
1241 comment,
1234 pull_request=pull_request
1242 pull_request=pull_request
1235 )
1243 )
1236
1244
1237 Session().flush()
1245 Session().flush()
1238 # this is somehow required to get access to some relationship
1246 # this is somehow required to get access to some relationship
1239 # loaded on comment
1247 # loaded on comment
1240 Session().refresh(comment)
1248 Session().refresh(comment)
1241
1249
1242 events.trigger(
1250 events.trigger(
1243 events.PullRequestCommentEvent(pull_request, comment))
1251 events.PullRequestCommentEvent(pull_request, comment))
1244
1252
1245 # we now calculate the status of pull request, and based on that
1253 # we now calculate the status of pull request, and based on that
1246 # calculation we set the commits status
1254 # calculation we set the commits status
1247 calculated_status = pull_request.calculated_review_status()
1255 calculated_status = pull_request.calculated_review_status()
1248 if old_calculated_status != calculated_status:
1256 if old_calculated_status != calculated_status:
1249 PullRequestModel()._trigger_pull_request_hook(
1257 PullRequestModel()._trigger_pull_request_hook(
1250 pull_request, self._rhodecode_user, 'review_status_change')
1258 pull_request, self._rhodecode_user, 'review_status_change')
1251
1259
1252 Session().commit()
1260 Session().commit()
1253
1261
1254 data = {
1262 data = {
1255 'target_id': h.safeid(h.safe_unicode(
1263 'target_id': h.safeid(h.safe_unicode(
1256 self.request.POST.get('f_path'))),
1264 self.request.POST.get('f_path'))),
1257 }
1265 }
1258 if comment:
1266 if comment:
1259 c.co = comment
1267 c.co = comment
1260 rendered_comment = render(
1268 rendered_comment = render(
1261 'rhodecode:templates/changeset/changeset_comment_block.mako',
1269 'rhodecode:templates/changeset/changeset_comment_block.mako',
1262 self._get_template_context(c), self.request)
1270 self._get_template_context(c), self.request)
1263
1271
1264 data.update(comment.get_dict())
1272 data.update(comment.get_dict())
1265 data.update({'rendered_text': rendered_comment})
1273 data.update({'rendered_text': rendered_comment})
1266
1274
1267 return data
1275 return data
1268
1276
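Regular, inline and status-changing comments all go through this single handler; the branch taken is decided purely by the POST fields read above. An illustrative field set for an inline comment (all values are made up; only the field names read by the view come from the code):

# Illustrative POST fields for pullrequest_comment_create (values made up)
form_data = {
    'text': 'Please rename this variable.',
    'comment_type': 'note',            # free-form comment type string
    'f_path': 'rhodecode/lib/helpers.py',
    'line': 'n125',                    # inline target; exact format is an assumption
    'changeset_status': None,          # set to a status value to also vote on the PR
    'resolves_comment_id': None,
    'close_pull_request': '',          # any truthy value closes the PR instead
}
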
1269 @LoginRequired()
1277 @LoginRequired()
1270 @NotAnonymous()
1278 @NotAnonymous()
1271 @HasRepoPermissionAnyDecorator(
1279 @HasRepoPermissionAnyDecorator(
1272 'repository.read', 'repository.write', 'repository.admin')
1280 'repository.read', 'repository.write', 'repository.admin')
1273 @CSRFRequired()
1281 @CSRFRequired()
1274 @view_config(
1282 @view_config(
1275 route_name='pullrequest_comment_delete', request_method='POST',
1283 route_name='pullrequest_comment_delete', request_method='POST',
1276 renderer='json_ext')
1284 renderer='json_ext')
1277 def pull_request_comment_delete(self):
1285 def pull_request_comment_delete(self):
1278 pull_request = PullRequest.get_or_404(
1286 pull_request = PullRequest.get_or_404(
1279 self.request.matchdict['pull_request_id'])
1287 self.request.matchdict['pull_request_id'])
1280
1288
1281 comment = ChangesetComment.get_or_404(
1289 comment = ChangesetComment.get_or_404(
1282 self.request.matchdict['comment_id'])
1290 self.request.matchdict['comment_id'])
1283 comment_id = comment.comment_id
1291 comment_id = comment.comment_id
1284
1292
1285 if pull_request.is_closed():
1293 if pull_request.is_closed():
1286 log.debug('comment: forbidden because pull request is closed')
1294 log.debug('comment: forbidden because pull request is closed')
1287 raise HTTPForbidden()
1295 raise HTTPForbidden()
1288
1296
1289 if not comment:
1297 if not comment:
1290 log.debug('Comment with id:%s not found, skipping', comment_id)
1298 log.debug('Comment with id:%s not found, skipping', comment_id)
1291 # comment already deleted in another call probably
1299 # comment already deleted in another call probably
1292 return True
1300 return True
1293
1301
1294 if comment.pull_request.is_closed():
1302 if comment.pull_request.is_closed():
1295 # don't allow deleting comments on closed pull request
1303 # don't allow deleting comments on closed pull request
1296 raise HTTPForbidden()
1304 raise HTTPForbidden()
1297
1305
1298 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1306 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1299 super_admin = h.HasPermissionAny('hg.admin')()
1307 super_admin = h.HasPermissionAny('hg.admin')()
1300 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1308 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1301 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1309 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1302 comment_repo_admin = is_repo_admin and is_repo_comment
1310 comment_repo_admin = is_repo_admin and is_repo_comment
1303
1311
1304 if super_admin or comment_owner or comment_repo_admin:
1312 if super_admin or comment_owner or comment_repo_admin:
1305 old_calculated_status = comment.pull_request.calculated_review_status()
1313 old_calculated_status = comment.pull_request.calculated_review_status()
1306 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1314 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1307 Session().commit()
1315 Session().commit()
1308 calculated_status = comment.pull_request.calculated_review_status()
1316 calculated_status = comment.pull_request.calculated_review_status()
1309 if old_calculated_status != calculated_status:
1317 if old_calculated_status != calculated_status:
1310 PullRequestModel()._trigger_pull_request_hook(
1318 PullRequestModel()._trigger_pull_request_hook(
1311 comment.pull_request, self._rhodecode_user, 'review_status_change')
1319 comment.pull_request, self._rhodecode_user, 'review_status_change')
1312 return True
1320 return True
1313 else:
1321 else:
1314 log.warning('No permissions for user %s to delete comment_id: %s',
1322 log.warning('No permissions for user %s to delete comment_id: %s',
1315 self._rhodecode_db_user, comment_id)
1323 self._rhodecode_db_user, comment_id)
1316 raise HTTPNotFound()
1324 raise HTTPNotFound()
@@ -1,2095 +1,2101 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Helper functions
22 Helper functions
23
23
24 Consists of functions to typically be used within templates, but also
24 Consists of functions to typically be used within templates, but also
25 available to Controllers. This module is available to both as 'h'.
25 available to Controllers. This module is available to both as 'h'.
26 """
26 """
27
27
28 import os
28 import os
29 import random
29 import random
30 import hashlib
30 import hashlib
31 import StringIO
31 import StringIO
32 import textwrap
32 import textwrap
33 import urllib
33 import urllib
34 import math
34 import math
35 import logging
35 import logging
36 import re
36 import re
37 import urlparse
37 import urlparse
38 import time
38 import time
39 import string
39 import string
40 import hashlib
40 import hashlib
41 from collections import OrderedDict
41 from collections import OrderedDict
42
42
43 import pygments
43 import pygments
44 import itertools
44 import itertools
45 import fnmatch
45 import fnmatch
46
46
47 from datetime import datetime
47 from datetime import datetime
48 from functools import partial
48 from functools import partial
49 from pygments.formatters.html import HtmlFormatter
49 from pygments.formatters.html import HtmlFormatter
50 from pygments import highlight as code_highlight
50 from pygments import highlight as code_highlight
51 from pygments.lexers import (
51 from pygments.lexers import (
52 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
52 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
53
53
54 from pyramid.threadlocal import get_current_request
54 from pyramid.threadlocal import get_current_request
55
55
56 from webhelpers.html import literal, HTML, escape
56 from webhelpers.html import literal, HTML, escape
57 from webhelpers.html.tools import *
57 from webhelpers.html.tools import *
58 from webhelpers.html.builder import make_tag
58 from webhelpers.html.builder import make_tag
59 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
59 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
60 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
60 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
61 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
61 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
62 submit, text, password, textarea, title, ul, xml_declaration, radio
62 submit, text, password, textarea, title, ul, xml_declaration, radio
63 from webhelpers.html.tools import auto_link, button_to, highlight, \
63 from webhelpers.html.tools import auto_link, button_to, highlight, \
64 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
64 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
65 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
65 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
66 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
66 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
67 replace_whitespace, urlify, truncate, wrap_paragraphs
67 replace_whitespace, urlify, truncate, wrap_paragraphs
68 from webhelpers.date import time_ago_in_words
68 from webhelpers.date import time_ago_in_words
69 from webhelpers.paginate import Page as _Page
69 from webhelpers.paginate import Page as _Page
70 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
70 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
71 convert_boolean_attrs, NotGiven, _make_safe_id_component
71 convert_boolean_attrs, NotGiven, _make_safe_id_component
72 from webhelpers2.number import format_byte_size
72 from webhelpers2.number import format_byte_size
73
73
74 from rhodecode.lib.action_parser import action_parser
74 from rhodecode.lib.action_parser import action_parser
75 from rhodecode.lib.ext_json import json
75 from rhodecode.lib.ext_json import json
76 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
76 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
77 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
77 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
78 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
78 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
79 AttributeDict, safe_int, md5, md5_safe
79 AttributeDict, safe_int, md5, md5_safe
80 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
80 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
81 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
81 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
82 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
82 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
83 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
83 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
84 from rhodecode.model.changeset_status import ChangesetStatusModel
84 from rhodecode.model.changeset_status import ChangesetStatusModel
85 from rhodecode.model.db import Permission, User, Repository
85 from rhodecode.model.db import Permission, User, Repository
86 from rhodecode.model.repo_group import RepoGroupModel
86 from rhodecode.model.repo_group import RepoGroupModel
87 from rhodecode.model.settings import IssueTrackerSettingsModel
87 from rhodecode.model.settings import IssueTrackerSettingsModel
88
88
89 log = logging.getLogger(__name__)
89 log = logging.getLogger(__name__)
90
90
91
91
92 DEFAULT_USER = User.DEFAULT_USER
92 DEFAULT_USER = User.DEFAULT_USER
93 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
93 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
94
94
95
95
96 def asset(path, ver=None, **kwargs):
96 def asset(path, ver=None, **kwargs):
97 """
97 """
98 Helper to generate a static asset file path for rhodecode assets
98 Helper to generate a static asset file path for rhodecode assets
99
99
100 eg. h.asset('images/image.png', ver='3923')
100 eg. h.asset('images/image.png', ver='3923')
101
101
102 :param path: path of asset
102 :param path: path of asset
103 :param ver: optional version query param to append as ?ver=
103 :param ver: optional version query param to append as ?ver=
104 """
104 """
105 request = get_current_request()
105 request = get_current_request()
106 query = {}
106 query = {}
107 query.update(kwargs)
107 query.update(kwargs)
108 if ver:
108 if ver:
109 query = {'ver': ver}
109 query = {'ver': ver}
110 return request.static_path(
110 return request.static_path(
111 'rhodecode:public/{}'.format(path), _query=query)
111 'rhodecode:public/{}'.format(path), _query=query)
112
112
113
113
114 default_html_escape_table = {
114 default_html_escape_table = {
115 ord('&'): u'&amp;',
115 ord('&'): u'&amp;',
116 ord('<'): u'&lt;',
116 ord('<'): u'&lt;',
117 ord('>'): u'&gt;',
117 ord('>'): u'&gt;',
118 ord('"'): u'&quot;',
118 ord('"'): u'&quot;',
119 ord("'"): u'&#39;',
119 ord("'"): u'&#39;',
120 }
120 }
121
121
122
122
123 def html_escape(text, html_escape_table=default_html_escape_table):
123 def html_escape(text, html_escape_table=default_html_escape_table):
124 """Produce entities within text."""
124 """Produce entities within text."""
125 return text.translate(html_escape_table)
125 return text.translate(html_escape_table)
126
126
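For illustration, the translation-table based escaping above behaves like this on a unicode string:

# Example (illustrative)
html_escape(u'<a href="x">Tom & Jerry\'s</a>')
# -> u'&lt;a href=&quot;x&quot;&gt;Tom &amp; Jerry&#39;s&lt;/a&gt;'
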
127
127
128 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
128 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
129 """
129 """
130 Truncate string ``s`` at the first occurrence of ``sub``.
130 Truncate string ``s`` at the first occurrence of ``sub``.
131
131
132 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
132 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
133 """
133 """
134 suffix_if_chopped = suffix_if_chopped or ''
134 suffix_if_chopped = suffix_if_chopped or ''
135 pos = s.find(sub)
135 pos = s.find(sub)
136 if pos == -1:
136 if pos == -1:
137 return s
137 return s
138
138
139 if inclusive:
139 if inclusive:
140 pos += len(sub)
140 pos += len(sub)
141
141
142 chopped = s[:pos]
142 chopped = s[:pos]
143 left = s[pos:].strip()
143 left = s[pos:].strip()
144
144
145 if left and suffix_if_chopped:
145 if left and suffix_if_chopped:
146 chopped += suffix_if_chopped
146 chopped += suffix_if_chopped
147
147
148 return chopped
148 return chopped
149
149
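Quick examples of the truncation behaviour (illustrative):

# Examples (illustrative)
chop_at_smart('commit message\nmore details', '\n')
# -> 'commit message'
chop_at_smart('commit message\nmore details', '\n', suffix_if_chopped='...')
# -> 'commit message...'
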
150
150
151 def shorter(text, size=20):
151 def shorter(text, size=20):
152 postfix = '...'
152 postfix = '...'
153 if len(text) > size:
153 if len(text) > size:
154 return text[:size - len(postfix)] + postfix
154 return text[:size - len(postfix)] + postfix
155 return text
155 return text
156
156
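With the default size of 20, the '...' postfix counts towards the budget (illustrative):

# Example (illustrative)
shorter('abcdefghijklmnopqrstuvwxyz')
# -> 'abcdefghijklmnopq...'   (17 characters kept, plus the postfix)
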
157
157
158 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
158 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
159 """
159 """
160 Reset button
160 Reset button
161 """
161 """
162 _set_input_attrs(attrs, type, name, value)
162 _set_input_attrs(attrs, type, name, value)
163 _set_id_attr(attrs, id, name)
163 _set_id_attr(attrs, id, name)
164 convert_boolean_attrs(attrs, ["disabled"])
164 convert_boolean_attrs(attrs, ["disabled"])
165 return HTML.input(**attrs)
165 return HTML.input(**attrs)
166
166
167 reset = _reset
167 reset = _reset
168 safeid = _make_safe_id_component
168 safeid = _make_safe_id_component
169
169
170
170
171 def branding(name, length=40):
171 def branding(name, length=40):
172 return truncate(name, length, indicator="")
172 return truncate(name, length, indicator="")
173
173
174
174
175 def FID(raw_id, path):
175 def FID(raw_id, path):
176 """
176 """
177 Creates a unique ID for a filenode based on a hash of its path and commit;
177 Creates a unique ID for a filenode based on a hash of its path and commit;
178 it's safe to use in URLs
178 it's safe to use in URLs
179
179
180 :param raw_id:
180 :param raw_id:
181 :param path:
181 :param path:
182 """
182 """
183
183
184 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
184 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
185
185
186
186
187 class _GetError(object):
187 class _GetError(object):
188 """Get error from form_errors, and represent it as span wrapped error
188 """Get error from form_errors, and represent it as span wrapped error
189 message
189 message
190
190
191 :param field_name: field to fetch errors for
191 :param field_name: field to fetch errors for
192 :param form_errors: form errors dict
192 :param form_errors: form errors dict
193 """
193 """
194
194
195 def __call__(self, field_name, form_errors):
195 def __call__(self, field_name, form_errors):
196 tmpl = """<span class="error_msg">%s</span>"""
196 tmpl = """<span class="error_msg">%s</span>"""
197 if form_errors and field_name in form_errors:
197 if form_errors and field_name in form_errors:
198 return literal(tmpl % form_errors.get(field_name))
198 return literal(tmpl % form_errors.get(field_name))
199
199
200 get_error = _GetError()
200 get_error = _GetError()
201
201
202
202
203 class _ToolTip(object):
203 class _ToolTip(object):
204
204
205 def __call__(self, tooltip_title, trim_at=50):
205 def __call__(self, tooltip_title, trim_at=50):
206 """
206 """
207 Helper that wraps the given text into nicely formatted,
207 Helper that wraps the given text into nicely formatted,
208 auto-wrapped tooltip text
208 auto-wrapped tooltip text
209
209
210 :param tooltip_title:
210 :param tooltip_title:
211 """
211 """
212 tooltip_title = escape(tooltip_title)
212 tooltip_title = escape(tooltip_title)
213 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
213 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
214 return tooltip_title
214 return tooltip_title
215 tooltip = _ToolTip()
215 tooltip = _ToolTip()
216
216
217
217
218 def files_breadcrumbs(repo_name, commit_id, file_path):
218 def files_breadcrumbs(repo_name, commit_id, file_path):
219 if isinstance(file_path, str):
219 if isinstance(file_path, str):
220 file_path = safe_unicode(file_path)
220 file_path = safe_unicode(file_path)
221
221
222 # TODO: johbo: Is this always a url like path, or is this operating
222 # TODO: johbo: Is this always a url like path, or is this operating
223 # system dependent?
223 # system dependent?
224 path_segments = file_path.split('/')
224 path_segments = file_path.split('/')
225
225
226 repo_name_html = escape(repo_name)
226 repo_name_html = escape(repo_name)
227 if len(path_segments) == 1 and path_segments[0] == '':
227 if len(path_segments) == 1 and path_segments[0] == '':
228 url_segments = [repo_name_html]
228 url_segments = [repo_name_html]
229 else:
229 else:
230 url_segments = [
230 url_segments = [
231 link_to(
231 link_to(
232 repo_name_html,
232 repo_name_html,
233 route_path(
233 route_path(
234 'repo_files',
234 'repo_files',
235 repo_name=repo_name,
235 repo_name=repo_name,
236 commit_id=commit_id,
236 commit_id=commit_id,
237 f_path=''),
237 f_path=''),
238 class_='pjax-link')]
238 class_='pjax-link')]
239
239
240 last_cnt = len(path_segments) - 1
240 last_cnt = len(path_segments) - 1
241 for cnt, segment in enumerate(path_segments):
241 for cnt, segment in enumerate(path_segments):
242 if not segment:
242 if not segment:
243 continue
243 continue
244 segment_html = escape(segment)
244 segment_html = escape(segment)
245
245
246 if cnt != last_cnt:
246 if cnt != last_cnt:
247 url_segments.append(
247 url_segments.append(
248 link_to(
248 link_to(
249 segment_html,
249 segment_html,
250 route_path(
250 route_path(
251 'repo_files',
251 'repo_files',
252 repo_name=repo_name,
252 repo_name=repo_name,
253 commit_id=commit_id,
253 commit_id=commit_id,
254 f_path='/'.join(path_segments[:cnt + 1])),
254 f_path='/'.join(path_segments[:cnt + 1])),
255 class_='pjax-link'))
255 class_='pjax-link'))
256 else:
256 else:
257 url_segments.append(segment_html)
257 url_segments.append(segment_html)
258
258
259 return literal('/'.join(url_segments))
259 return literal('/'.join(url_segments))
260
260
261
261
262 class CodeHtmlFormatter(HtmlFormatter):
262 class CodeHtmlFormatter(HtmlFormatter):
263 """
263 """
264 My code Html Formatter for source codes
264 My code Html Formatter for source codes
265 """
265 """
266
266
267 def wrap(self, source, outfile):
267 def wrap(self, source, outfile):
268 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
268 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
269
269
270 def _wrap_code(self, source):
270 def _wrap_code(self, source):
271 for cnt, it in enumerate(source):
271 for cnt, it in enumerate(source):
272 i, t = it
272 i, t = it
273 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
273 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
274 yield i, t
274 yield i, t
275
275
276 def _wrap_tablelinenos(self, inner):
276 def _wrap_tablelinenos(self, inner):
277 dummyoutfile = StringIO.StringIO()
277 dummyoutfile = StringIO.StringIO()
278 lncount = 0
278 lncount = 0
279 for t, line in inner:
279 for t, line in inner:
280 if t:
280 if t:
281 lncount += 1
281 lncount += 1
282 dummyoutfile.write(line)
282 dummyoutfile.write(line)
283
283
284 fl = self.linenostart
284 fl = self.linenostart
285 mw = len(str(lncount + fl - 1))
285 mw = len(str(lncount + fl - 1))
286 sp = self.linenospecial
286 sp = self.linenospecial
287 st = self.linenostep
287 st = self.linenostep
288 la = self.lineanchors
288 la = self.lineanchors
289 aln = self.anchorlinenos
289 aln = self.anchorlinenos
290 nocls = self.noclasses
290 nocls = self.noclasses
291 if sp:
291 if sp:
292 lines = []
292 lines = []
293
293
294 for i in range(fl, fl + lncount):
294 for i in range(fl, fl + lncount):
295 if i % st == 0:
295 if i % st == 0:
296 if i % sp == 0:
296 if i % sp == 0:
297 if aln:
297 if aln:
298 lines.append('<a href="#%s%d" class="special">%*d</a>' %
298 lines.append('<a href="#%s%d" class="special">%*d</a>' %
299 (la, i, mw, i))
299 (la, i, mw, i))
300 else:
300 else:
301 lines.append('<span class="special">%*d</span>' % (mw, i))
301 lines.append('<span class="special">%*d</span>' % (mw, i))
302 else:
302 else:
303 if aln:
303 if aln:
304 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
304 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
305 else:
305 else:
306 lines.append('%*d' % (mw, i))
306 lines.append('%*d' % (mw, i))
307 else:
307 else:
308 lines.append('')
308 lines.append('')
309 ls = '\n'.join(lines)
309 ls = '\n'.join(lines)
310 else:
310 else:
311 lines = []
311 lines = []
312 for i in range(fl, fl + lncount):
312 for i in range(fl, fl + lncount):
313 if i % st == 0:
313 if i % st == 0:
314 if aln:
314 if aln:
315 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
315 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
316 else:
316 else:
317 lines.append('%*d' % (mw, i))
317 lines.append('%*d' % (mw, i))
318 else:
318 else:
319 lines.append('')
319 lines.append('')
320 ls = '\n'.join(lines)
320 ls = '\n'.join(lines)
321
321
322 # in case you wonder about the seemingly redundant <div> here: since the
322 # in case you wonder about the seemingly redundant <div> here: since the
323 # content in the other cell also is wrapped in a div, some browsers in
323 # content in the other cell also is wrapped in a div, some browsers in
324 # some configurations seem to mess up the formatting...
324 # some configurations seem to mess up the formatting...
325 if nocls:
325 if nocls:
326 yield 0, ('<table class="%stable">' % self.cssclass +
326 yield 0, ('<table class="%stable">' % self.cssclass +
327 '<tr><td><div class="linenodiv" '
327 '<tr><td><div class="linenodiv" '
328 'style="background-color: #f0f0f0; padding-right: 10px">'
328 'style="background-color: #f0f0f0; padding-right: 10px">'
329 '<pre style="line-height: 125%">' +
329 '<pre style="line-height: 125%">' +
330 ls + '</pre></div></td><td id="hlcode" class="code">')
330 ls + '</pre></div></td><td id="hlcode" class="code">')
331 else:
331 else:
332 yield 0, ('<table class="%stable">' % self.cssclass +
332 yield 0, ('<table class="%stable">' % self.cssclass +
333 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
333 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
334 ls + '</pre></div></td><td id="hlcode" class="code">')
334 ls + '</pre></div></td><td id="hlcode" class="code">')
335 yield 0, dummyoutfile.getvalue()
335 yield 0, dummyoutfile.getvalue()
336 yield 0, '</td></tr></table>'
336 yield 0, '</td></tr></table>'
337
337
338
338
339 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
339 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
340 def __init__(self, **kw):
340 def __init__(self, **kw):
341 # only show these line numbers if set
341 # only show these line numbers if set
342 self.only_lines = kw.pop('only_line_numbers', [])
342 self.only_lines = kw.pop('only_line_numbers', [])
343 self.query_terms = kw.pop('query_terms', [])
343 self.query_terms = kw.pop('query_terms', [])
344 self.max_lines = kw.pop('max_lines', 5)
344 self.max_lines = kw.pop('max_lines', 5)
345 self.line_context = kw.pop('line_context', 3)
345 self.line_context = kw.pop('line_context', 3)
346 self.url = kw.pop('url', None)
346 self.url = kw.pop('url', None)
347
347
348 super(CodeHtmlFormatter, self).__init__(**kw)
348 super(CodeHtmlFormatter, self).__init__(**kw)
349
349
350 def _wrap_code(self, source):
350 def _wrap_code(self, source):
351 for cnt, it in enumerate(source):
351 for cnt, it in enumerate(source):
352 i, t = it
352 i, t = it
353 t = '<pre>%s</pre>' % t
353 t = '<pre>%s</pre>' % t
354 yield i, t
354 yield i, t
355
355
356 def _wrap_tablelinenos(self, inner):
356 def _wrap_tablelinenos(self, inner):
357 yield 0, '<table class="code-highlight %stable">' % self.cssclass
357 yield 0, '<table class="code-highlight %stable">' % self.cssclass
358
358
359 last_shown_line_number = 0
359 last_shown_line_number = 0
360 current_line_number = 1
360 current_line_number = 1
361
361
362 for t, line in inner:
362 for t, line in inner:
363 if not t:
363 if not t:
364 yield t, line
364 yield t, line
365 continue
365 continue
366
366
367 if current_line_number in self.only_lines:
367 if current_line_number in self.only_lines:
368 if last_shown_line_number + 1 != current_line_number:
368 if last_shown_line_number + 1 != current_line_number:
369 yield 0, '<tr>'
369 yield 0, '<tr>'
370 yield 0, '<td class="line">...</td>'
370 yield 0, '<td class="line">...</td>'
371 yield 0, '<td id="hlcode" class="code"></td>'
371 yield 0, '<td id="hlcode" class="code"></td>'
372 yield 0, '</tr>'
372 yield 0, '</tr>'
373
373
374 yield 0, '<tr>'
374 yield 0, '<tr>'
375 if self.url:
375 if self.url:
376 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
376 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
377 self.url, current_line_number, current_line_number)
377 self.url, current_line_number, current_line_number)
378 else:
378 else:
379 yield 0, '<td class="line"><a href="">%i</a></td>' % (
379 yield 0, '<td class="line"><a href="">%i</a></td>' % (
380 current_line_number)
380 current_line_number)
381 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
381 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
382 yield 0, '</tr>'
382 yield 0, '</tr>'
383
383
384 last_shown_line_number = current_line_number
384 last_shown_line_number = current_line_number
385
385
386 current_line_number += 1
386 current_line_number += 1
387
387
388
388
389 yield 0, '</table>'
389 yield 0, '</table>'
390
390
391
391
392 def extract_phrases(text_query):
392 def extract_phrases(text_query):
393 """
393 """
394 Extracts phrases from a search term string, making sure phrases
394 Extracts phrases from a search term string, making sure phrases
395 contained in double quotes are kept together, and discarding empty
395 contained in double quotes are kept together, and discarding empty
396 or whitespace-only values, eg.
396 or whitespace-only values, eg.
397
397
398 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
398 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
399
399
400 """
400 """
401
401
402 in_phrase = False
402 in_phrase = False
403 buf = ''
403 buf = ''
404 phrases = []
404 phrases = []
405 for char in text_query:
405 for char in text_query:
406 if in_phrase:
406 if in_phrase:
407 if char == '"': # end phrase
407 if char == '"': # end phrase
408 phrases.append(buf)
408 phrases.append(buf)
409 buf = ''
409 buf = ''
410 in_phrase = False
410 in_phrase = False
411 continue
411 continue
412 else:
412 else:
413 buf += char
413 buf += char
414 continue
414 continue
415 else:
415 else:
416 if char == '"': # start phrase
416 if char == '"': # start phrase
417 in_phrase = True
417 in_phrase = True
418 phrases.append(buf)
418 phrases.append(buf)
419 buf = ''
419 buf = ''
420 continue
420 continue
421 elif char == ' ':
421 elif char == ' ':
422 phrases.append(buf)
422 phrases.append(buf)
423 buf = ''
423 buf = ''
424 continue
424 continue
425 else:
425 else:
426 buf += char
426 buf += char
427
427
428 phrases.append(buf)
428 phrases.append(buf)
429 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
429 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
430 return phrases
430 return phrases
431
431
432
432
433 def get_matching_offsets(text, phrases):
433 def get_matching_offsets(text, phrases):
434 """
434 """
435 Returns a list of string offsets in `text` where the given `phrases` match
435 Returns a list of string offsets in `text` where the given `phrases` match
436
436
437 >>> get_matching_offsets('some text here', ['some', 'here'])
437 >>> get_matching_offsets('some text here', ['some', 'here'])
438 [(0, 4), (10, 14)]
438 [(0, 4), (10, 14)]
439
439
440 """
440 """
441 offsets = []
441 offsets = []
442 for phrase in phrases:
442 for phrase in phrases:
443 for match in re.finditer(phrase, text):
443 for match in re.finditer(phrase, text):
444 offsets.append((match.start(), match.end()))
444 offsets.append((match.start(), match.end()))
445
445
446 return offsets
446 return offsets
447
447
448
448
449 def normalize_text_for_matching(x):
449 def normalize_text_for_matching(x):
450 """
450 """
451 Replaces all non-alphanumeric characters with spaces and lower-cases the string,
451 Replaces all non-alphanumeric characters with spaces and lower-cases the string,
452 useful for comparing two text strings without punctuation
452 useful for comparing two text strings without punctuation
453 """
453 """
454 return re.sub(r'[^\w]', ' ', x.lower())
454 return re.sub(r'[^\w]', ' ', x.lower())
455
455
456
456
457 def get_matching_line_offsets(lines, terms):
457 def get_matching_line_offsets(lines, terms):
458 """ Return a set of `lines` indices (starting from 1) matching a
458 """ Return a set of `lines` indices (starting from 1) matching a
459 text search query, along with `context` lines above/below matching lines
459 text search query, along with `context` lines above/below matching lines
460
460
461 :param lines: list of strings representing lines
461 :param lines: list of strings representing lines
462 :param terms: search term string to match in lines eg. 'some text'
462 :param terms: search term string to match in lines eg. 'some text'
463 :param context: number of lines above/below a matching line to add to result
463 :param context: number of lines above/below a matching line to add to result
464 :param max_lines: cut off for lines of interest
464 :param max_lines: cut off for lines of interest
465 eg.
465 eg.
466
466
467 text = '''
467 text = '''
468 words words words
468 words words words
469 words words words
469 words words words
470 some text some
470 some text some
471 words words words
471 words words words
472 words words words
472 words words words
473 text here what
473 text here what
474 '''
474 '''
475 get_matching_line_offsets(text, 'text', context=1)
475 get_matching_line_offsets(text, 'text', context=1)
476 {3: [(5, 9)], 6: [(0, 4)]}
476 {3: [(5, 9)], 6: [(0, 4)]}
477
477
478 """
478 """
479 matching_lines = {}
479 matching_lines = {}
480 phrases = [normalize_text_for_matching(phrase)
480 phrases = [normalize_text_for_matching(phrase)
481 for phrase in extract_phrases(terms)]
481 for phrase in extract_phrases(terms)]
482
482
483 for line_index, line in enumerate(lines, start=1):
483 for line_index, line in enumerate(lines, start=1):
484 match_offsets = get_matching_offsets(
484 match_offsets = get_matching_offsets(
485 normalize_text_for_matching(line), phrases)
485 normalize_text_for_matching(line), phrases)
486 if match_offsets:
486 if match_offsets:
487 matching_lines[line_index] = match_offsets
487 matching_lines[line_index] = match_offsets
488
488
489 return matching_lines
489 return matching_lines
490
490
491
491
492 def hsv_to_rgb(h, s, v):
492 def hsv_to_rgb(h, s, v):
493 """ Convert hsv color values to rgb """
493 """ Convert hsv color values to rgb """
494
494
495 if s == 0.0:
495 if s == 0.0:
496 return v, v, v
496 return v, v, v
497 i = int(h * 6.0) # XXX assume int() truncates!
497 i = int(h * 6.0) # XXX assume int() truncates!
498 f = (h * 6.0) - i
498 f = (h * 6.0) - i
499 p = v * (1.0 - s)
499 p = v * (1.0 - s)
500 q = v * (1.0 - s * f)
500 q = v * (1.0 - s * f)
501 t = v * (1.0 - s * (1.0 - f))
501 t = v * (1.0 - s * (1.0 - f))
502 i = i % 6
502 i = i % 6
503 if i == 0:
503 if i == 0:
504 return v, t, p
504 return v, t, p
505 if i == 1:
505 if i == 1:
506 return q, v, p
506 return q, v, p
507 if i == 2:
507 if i == 2:
508 return p, v, t
508 return p, v, t
509 if i == 3:
509 if i == 3:
510 return p, q, v
510 return p, q, v
511 if i == 4:
511 if i == 4:
512 return t, p, v
512 return t, p, v
513 if i == 5:
513 if i == 5:
514 return v, p, q
514 return v, p, q
515
515
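Sanity check for the conversion above: pure red round-trips as expected (illustrative):

# Example (illustrative): hue 0, full saturation and value -> red
hsv_to_rgb(0.0, 1.0, 1.0)
# -> (1.0, 0.0, 0.0)
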
516
516
517 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
517 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
518 """
518 """
519 Generator yielding n evenly distributed colors using
519 Generator yielding n evenly distributed colors using
520 HSV color and the golden ratio. It always returns the same order of colors
520 HSV color and the golden ratio. It always returns the same order of colors
521
521
522 :param n: number of colors to generate
522 :param n: number of colors to generate
523 :param saturation: saturation of returned colors
523 :param saturation: saturation of returned colors
524 :param lightness: lightness of returned colors
524 :param lightness: lightness of returned colors
525 :returns: RGB tuple
525 :returns: RGB tuple
526 """
526 """
527
527
528 golden_ratio = 0.618033988749895
528 golden_ratio = 0.618033988749895
529 h = 0.22717784590367374
529 h = 0.22717784590367374
530
530
531 for _ in xrange(n):
531 for _ in xrange(n):
532 h += golden_ratio
532 h += golden_ratio
533 h %= 1
533 h %= 1
534 HSV_tuple = [h, saturation, lightness]
534 HSV_tuple = [h, saturation, lightness]
535 RGB_tuple = hsv_to_rgb(*HSV_tuple)
535 RGB_tuple = hsv_to_rgb(*HSV_tuple)
536 yield map(lambda x: str(int(x * 256)), RGB_tuple)
536 yield map(lambda x: str(int(x * 256)), RGB_tuple)
537
537
538
538
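Usage sketch for the generator above (Python 2 idioms, matching the surrounding code):

# Illustrative usage of the color generator above
cgen = unique_color_generator(saturation=0.10, lightness=0.95)
rgb_components = cgen.next()               # a list of three stringified components
css_color = 'rgb(%s)' % ', '.join(rgb_components)
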
539 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
539 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
540 """
540 """
541 Returns a function which when called with an argument returns a unique
541 Returns a function which when called with an argument returns a unique
542 color for that argument, eg.
542 color for that argument, eg.
543
543
544 :param n: number of colors to generate
544 :param n: number of colors to generate
545 :param saturation: saturation of returned colors
545 :param saturation: saturation of returned colors
546 :param lightness: lightness of returned colors
546 :param lightness: lightness of returned colors
547 :returns: css RGB string
547 :returns: css RGB string
548
548
549 >>> color_hash = color_hasher()
549 >>> color_hash = color_hasher()
550 >>> color_hash('hello')
550 >>> color_hash('hello')
551 'rgb(34, 12, 59)'
551 'rgb(34, 12, 59)'
552 >>> color_hash('hello')
552 >>> color_hash('hello')
553 'rgb(34, 12, 59)'
553 'rgb(34, 12, 59)'
554 >>> color_hash('other')
554 >>> color_hash('other')
555 'rgb(90, 224, 159)'
555 'rgb(90, 224, 159)'
556 """
556 """
557
557
558 color_dict = {}
558 color_dict = {}
559 cgenerator = unique_color_generator(
559 cgenerator = unique_color_generator(
560 saturation=saturation, lightness=lightness)
560 saturation=saturation, lightness=lightness)
561
561
562 def get_color_string(thing):
562 def get_color_string(thing):
563 if thing in color_dict:
563 if thing in color_dict:
564 col = color_dict[thing]
564 col = color_dict[thing]
565 else:
565 else:
566 col = color_dict[thing] = cgenerator.next()
566 col = color_dict[thing] = cgenerator.next()
567 return "rgb(%s)" % (', '.join(col))
567 return "rgb(%s)" % (', '.join(col))
568
568
569 return get_color_string
569 return get_color_string
570
570
571
571
572 def get_lexer_safe(mimetype=None, filepath=None):
572 def get_lexer_safe(mimetype=None, filepath=None):
573 """
573 """
574 Tries to return a relevant pygments lexer using mimetype/filepath name,
574 Tries to return a relevant pygments lexer using mimetype/filepath name,
575 defaulting to plain text if none could be found
575 defaulting to plain text if none could be found
576 """
576 """
577 lexer = None
577 lexer = None
578 try:
578 try:
579 if mimetype:
579 if mimetype:
580 lexer = get_lexer_for_mimetype(mimetype)
580 lexer = get_lexer_for_mimetype(mimetype)
581 if not lexer:
581 if not lexer:
582 lexer = get_lexer_for_filename(filepath)
582 lexer = get_lexer_for_filename(filepath)
583 except pygments.util.ClassNotFound:
583 except pygments.util.ClassNotFound:
584 pass
584 pass
585
585
586 if not lexer:
586 if not lexer:
587 lexer = get_lexer_by_name('text')
587 lexer = get_lexer_by_name('text')
588
588
589 return lexer
589 return lexer
590
590
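For illustration, a known mimetype resolves to its pygments lexer, while anything unrecognised falls back to plain text:

# Examples (illustrative)
get_lexer_safe(mimetype='text/x-python')      # -> a pygments PythonLexer instance
get_lexer_safe(filepath='notes.unknown-ext')  # -> the plain 'text' lexer
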
591
591
592 def get_lexer_for_filenode(filenode):
592 def get_lexer_for_filenode(filenode):
593 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
593 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
594 return lexer
594 return lexer
595
595
596
596
597 def pygmentize(filenode, **kwargs):
597 def pygmentize(filenode, **kwargs):
598 """
598 """
599 pygmentize function using pygments
599 pygmentize function using pygments
600
600
601 :param filenode:
601 :param filenode:
602 """
602 """
603 lexer = get_lexer_for_filenode(filenode)
603 lexer = get_lexer_for_filenode(filenode)
604 return literal(code_highlight(filenode.content, lexer,
604 return literal(code_highlight(filenode.content, lexer,
605 CodeHtmlFormatter(**kwargs)))
605 CodeHtmlFormatter(**kwargs)))
606
606
607
607
608 def is_following_repo(repo_name, user_id):
608 def is_following_repo(repo_name, user_id):
609 from rhodecode.model.scm import ScmModel
609 from rhodecode.model.scm import ScmModel
610 return ScmModel().is_following_repo(repo_name, user_id)
610 return ScmModel().is_following_repo(repo_name, user_id)
611
611
612
612
class _Message(object):
    """A message returned by ``Flash.pop_messages()``.

    Converting the message to a string returns the message text. Instances
    also have the following attributes:

    * ``message``: the message text.
    * ``category``: the category specified when the message was created.
    """

    def __init__(self, category, message):
        self.category = category
        self.message = message

    def __str__(self):
        return self.message

    __unicode__ = __str__

    def __html__(self):
        return escape(safe_unicode(self.message))


class Flash(object):
    # List of allowed categories. If None, allow any category.
    categories = ["warning", "notice", "error", "success"]

    # Default category if none is specified.
    default_category = "notice"

    def __init__(self, session_key="flash", categories=None,
                 default_category=None):
        """
        Instantiate a ``Flash`` object.

        ``session_key`` is the key to save the messages under in the user's
        session.

        ``categories`` is an optional list which overrides the default list
        of categories.

        ``default_category`` overrides the default category used for messages
        when none is specified.
        """
        self.session_key = session_key
        if categories is not None:
            self.categories = categories
        if default_category is not None:
            self.default_category = default_category
        if self.categories and self.default_category not in self.categories:
            raise ValueError(
                "unrecognized default category %r" % (self.default_category,))

    def pop_messages(self, session=None, request=None):
        """
        Return all accumulated messages and delete them from the session.

        The return value is a list of ``Message`` objects.
        """
        messages = []

        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        # Pop the 'old' pylons flash messages. They are tuples of the form
        # (category, message)
        for cat, msg in session.pop(self.session_key, []):
            messages.append(_Message(cat, msg))

        # Pop the 'new' pyramid flash messages for each category as list
        # of strings.
        for cat in self.categories:
            for msg in session.pop_flash(queue=cat):
                messages.append(_Message(cat, msg))
        # Map messages from the default queue to the 'notice' category.
        for msg in session.pop_flash():
            messages.append(_Message('notice', msg))

        session.save()
        return messages

    def json_alerts(self, session=None, request=None):
        payloads = []
        messages = flash.pop_messages(session=session, request=request)
        if messages:
            for message in messages:
                subdata = {}
                if hasattr(message.message, 'rsplit'):
                    flash_data = message.message.rsplit('|DELIM|', 1)
                    org_message = flash_data[0]
                    if len(flash_data) > 1:
                        subdata = json.loads(flash_data[1])
                else:
                    org_message = message.message
                payloads.append({
                    'message': {
                        'message': u'{}'.format(org_message),
                        'level': message.category,
                        'force': True,
                        'subdata': subdata
                    }
                })
        return json.dumps(payloads)

    def __call__(self, message, category=None, ignore_duplicate=False,
                 session=None, request=None):

        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        session.flash(
            message, queue=category, allow_duplicate=not ignore_duplicate)


flash = Flash()

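# A short usage sketch for the ``flash`` helper above (message texts and the
# JSON sub-data are invented). Messages are queued per category on the
# pyramid session and later drained by ``pop_messages()`` or serialized by
# ``json_alerts()``; extra JSON data can be piggybacked on a message by
# appending it after the '|DELIM|' marker:
#
#   flash(u'Repository created', category='success')
#   flash(u'Import finished|DELIM|{"took": "3s"}', category='notice')
#
#   # later, typically when rendering a template or an API response:
#   for msg in flash.pop_messages(request=request):
#       notify_ui(msg.category, msg.message)  # hypothetical consumer

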
#==============================================================================
# SCM FILTERS available via h.
#==============================================================================
from rhodecode.lib.vcs.utils import author_name, author_email
from rhodecode.lib.utils2 import credentials_filter, age as _age
from rhodecode.model.db import User, ChangesetStatus

age = _age
capitalize = lambda x: x.capitalize()
email = author_email
short_id = lambda x: x[:12]
hide_credentials = lambda x: ''.join(credentials_filter(x))


import pytz
import tzlocal
local_timezone = tzlocal.get_localzone()


def age_component(datetime_iso, value=None, time_is_local=False):
    title = value or format_date(datetime_iso)
    tzinfo = '+00:00'

    # detect if we have timezone info, otherwise add it
    if time_is_local and isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
        force_timezone = os.environ.get('RC_TIMEZONE', '')
        if force_timezone:
            force_timezone = pytz.timezone(force_timezone)
        timezone = force_timezone or local_timezone
        offset = timezone.localize(datetime_iso).strftime('%z')
        tzinfo = '{}:{}'.format(offset[:-2], offset[-2:])

    return literal(
        '<time class="timeago tooltip" '
        'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
            datetime_iso, title, tzinfo))


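# A worked example of the offset handling in ``age_component`` (the timezone
# is an assumption, not a deployment default): for a naive local datetime and
# e.g. Europe/Berlin in summer, strftime('%z') yields '+0200', which is then
# rewritten so the title and datetime attributes carry an ISO-8601 style
# offset:
#
#   offset = '+0200'
#   tzinfo = '{}:{}'.format(offset[:-2], offset[-2:])  # -> '+02:00'

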
def _shorten_commit_id(commit_id):
    from rhodecode import CONFIG
    def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
    return commit_id[:def_len]


def show_id(commit):
    """
    Configurable function that shows the commit ID;
    by default it's r123:fffeeefffeee

    :param commit: commit instance
    """
    from rhodecode import CONFIG
    show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))

    raw_id = _shorten_commit_id(commit.raw_id)
    if show_idx:
        return 'r%s:%s' % (commit.idx, raw_id)
    else:
        return '%s' % (raw_id, )


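# Illustrative output of ``show_id`` (index and hash are made up): with
# 'rhodecode_show_revision_number' enabled and a SHA length of 12, a commit
# with idx=123 renders as 'r123:fffeeefffeee'; with the revision number
# disabled only the shortened hash 'fffeeefffeee' is returned.
#
#   >>> show_id(commit)  # doctest: +SKIP
#   'r123:fffeeefffeee'

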
def format_date(date):
    """
    use a standardized formatting for dates used in RhodeCode

    :param date: date/datetime object
    :return: formatted date
    """

    if date:
        _fmt = "%a, %d %b %Y %H:%M:%S"
        return safe_unicode(date.strftime(_fmt))

    return u""


class _RepoChecker(object):

    def __init__(self, backend_alias):
        self._backend_alias = backend_alias

    def __call__(self, repository):
        if hasattr(repository, 'alias'):
            _type = repository.alias
        elif hasattr(repository, 'repo_type'):
            _type = repository.repo_type
        else:
            _type = repository
        return _type == self._backend_alias

is_git = _RepoChecker('git')
is_hg = _RepoChecker('hg')
is_svn = _RepoChecker('svn')


def get_repo_type_by_name(repo_name):
    repo = Repository.get_by_repo_name(repo_name)
    return repo.repo_type


def is_svn_without_proxy(repository):
    if is_svn(repository):
        from rhodecode.model.settings import VcsSettingsModel
        conf = VcsSettingsModel().get_ui_settings_as_config_obj()
        return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
    return False


def discover_user(author):
    """
    Tries to discover a RhodeCode User based on the author string. The author
    string is typically `FirstName LastName <email@address.com>`
    """

    # if author is already an instance use it for extraction
    if isinstance(author, User):
        return author

    # Valid email in the passed attribute, see if it belongs to a user in the system
    _email = author_email(author)
    if _email != '':
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
        if user is not None:
            return user

    # Maybe it's a username? Try to extract it and fetch the user by username.
    _author = author_name(author)
    user = User.get_by_username(_author, case_insensitive=True, cache=True)
    if user is not None:
        return user

    return None


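# A minimal sketch of how ``discover_user`` resolves an author (the author
# string is invented): a value such as 'Jane Doe <jane.doe@example.com>' is
# matched by e-mail first, then by the name part treated as a username, and
# ``None`` is returned when neither maps to a known RhodeCode user.
#
#   >>> user = discover_user('Jane Doe <jane.doe@example.com>')  # doctest: +SKIP
#   >>> user.username if user else None                          # doctest: +SKIP
#   u'jane.doe'

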
def email_or_none(author):
    # extract email from the commit string
    _email = author_email(author)

    # If we have an email, use it, otherwise
    # see if it contains a username we can get an email from
    if _email != '':
        return _email
    else:
        user = User.get_by_username(
            author_name(author), case_insensitive=True, cache=True)

        if user is not None:
            return user.email

    # No valid email, not a valid user in the system, none!
    return None


def link_to_user(author, length=0, **kwargs):
    user = discover_user(author)
    # user can be None, but if we have it already it means we can re-use it
    # in the person() function, so we save one intensive query
    if user:
        author = user

    display_person = person(author, 'username_or_name_or_email')
    if length:
        display_person = shorter(display_person, length)

    if user:
        return link_to(
            escape(display_person),
            route_path('user_profile', username=user.username),
            **kwargs)
    else:
        return escape(display_person)


def link_to_group(users_group_name, **kwargs):
    return link_to(
        escape(users_group_name),
        route_path('user_group_profile', user_group_name=users_group_name),
        **kwargs)


def person(author, show_attr="username_and_name"):
    user = discover_user(author)
    if user:
        return getattr(user, show_attr)
    else:
        _author = author_name(author)
        _email = email(author)
        return _author or _email


def author_string(email):
    if email:
        user = User.get_by_email(email, case_insensitive=True, cache=True)
        if user:
            if user.first_name or user.last_name:
                return '%s %s &lt;%s&gt;' % (
                    user.first_name, user.last_name, email)
            else:
                return email
        else:
            return email
    else:
        return None


def person_by_id(id_, show_attr="username_and_name"):
    # attr to return from fetched user
    person_getter = lambda usr: getattr(usr, show_attr)

    # maybe it's an ID?
    if str(id_).isdigit() or isinstance(id_, int):
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            return person_getter(user)
    return id_


def gravatar_with_user(request, author, show_disabled=False):
    _render = request.get_partial_renderer(
        'rhodecode:templates/base/base.mako')
    return _render('gravatar_with_user', author, show_disabled=show_disabled)


tags_paterns = OrderedDict((
    ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
              '<div class="metatag" tag="lang">\\2</div>')),

    ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
             '<div class="metatag" tag="see">see: \\1 </div>')),

    ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
             '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),

    ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
                 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),

    ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
             '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),

    ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
               '<div class="metatag" tag="state \\1">\\1</div>')),

    # label in grey
    ('label', (re.compile(r'\[([a-z]+)\]'),
               '<div class="metatag" tag="label">\\1</div>')),

    # generic catch all in grey
    ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
                 '<div class="metatag" tag="generic">\\1</div>')),
))


def extract_metatags(value):
    """
    Extract supported meta-tags from given text value
    """
    tags = []
    if not value:
        return tags, ''

    for key, val in tags_paterns.items():
        pat, replace_html = val
        tags.extend([(key, x.group()) for x in pat.finditer(value)])
        value = pat.sub('', value)

    return tags, value


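# A small illustration of the metatag syntax handled above (the description
# text is invented; note the tags are matched against already HTML-escaped
# text, hence '=&gt;'): ``extract_metatags`` returns the matched (key, tag)
# pairs plus the text with the tags stripped out, in the order the patterns
# are registered.
#
#   >>> tags, rest = extract_metatags(
#   ...     u'CRM helpers [stable] [lang =&gt; python]')  # doctest: +SKIP
#   >>> [key for key, _tag in tags]                       # doctest: +SKIP
#   ['lang', 'state']

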
def style_metatag(tag_type, value):
    """
    converts tags from value into their html equivalent
    """
    if not value:
        return ''

    html_value = value
    tag_data = tags_paterns.get(tag_type)
    if tag_data:
        pat, replace_html = tag_data
        # convert to plain `unicode` instead of a markup tag to be used in
        # regex expressions. safe_unicode doesn't work here
        html_value = pat.sub(replace_html, unicode(value))

    return html_value


def bool2icon(value):
    """
    Returns the boolean value of a given value, represented as an html
    element with classes that represent icons

    :param value: given value to convert to html node
    """

    if value:  # does bool conversion
        return HTML.tag('i', class_="icon-true")
    else:  # not true as bool
        return HTML.tag('i', class_="icon-false")


#==============================================================================
# PERMS
#==============================================================================
from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
    HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
    HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
    csrf_token_key


#==============================================================================
# GRAVATAR URL
#==============================================================================
class InitialsGravatar(object):
    def __init__(self, email_address, first_name, last_name, size=30,
                 background=None, text_color='#fff'):
        self.size = size
        self.first_name = first_name
        self.last_name = last_name
        self.email_address = email_address
        self.background = background or self.str2color(email_address)
        self.text_color = text_color

    def get_color_bank(self):
        """
        returns a predefined list of colors that gravatars can use.
        Those are randomized distinct colors that guarantee readability and
        uniqueness.

        generated with: http://phrogz.net/css/distinct-colors.html
        """
        return [
            '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
            '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
            '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
            '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
            '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
            '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
            '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
            '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
            '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
            '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
            '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
            '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
            '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
            '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
            '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
            '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
            '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
            '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
            '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
            '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
            '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
            '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
            '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
            '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
            '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
            '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
            '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
            '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
            '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
            '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
            '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
            '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
            '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
            '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
            '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
            '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
            '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
            '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
            '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
            '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
            '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
            '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
            '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
            '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
            '#4f8c46', '#368dd9', '#5c0073'
        ]

    def rgb_to_hex_color(self, rgb_tuple):
        """
        Converts a passed rgb_tuple to a hex color.

        :param rgb_tuple: tuple with 3 ints representing the rgb color space
        """
        return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))

    def email_to_int_list(self, email_str):
        """
        Get every byte of the hex digest value of email and turn it to integer.
        Each value is always between 0 and 255
        """
        digest = md5_safe(email_str.lower())
        return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]

    def pick_color_bank_index(self, email_str, color_bank):
        return self.email_to_int_list(email_str)[0] % len(color_bank)

    def str2color(self, email_str):
        """
        Tries to map an email to a color using a stable algorithm

        :param email_str:
        """
        color_bank = self.get_color_bank()
        # pick position (modulo its length) so we always find it in the
        # bank even if it's smaller than 256 values
        pos = self.pick_color_bank_index(email_str, color_bank)
        return color_bank[pos]

    def normalize_email(self, email_address):
        import unicodedata
        # default host used to fill in the fake/missing email
        default_host = u'localhost'

        if not email_address:
            email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)

        email_address = safe_unicode(email_address)

        if u'@' not in email_address:
            email_address = u'%s@%s' % (email_address, default_host)

        if email_address.endswith(u'@'):
            email_address = u'%s%s' % (email_address, default_host)

        email_address = unicodedata.normalize('NFKD', email_address)\
            .encode('ascii', 'ignore')
        return email_address

    def get_initials(self):
        """
        Returns 2 letter initials calculated based on the input.
        The algorithm picks the first given email address, and takes the
        first letter of the part before @, and then the first letter of the
        server name. In case the part before @ is in a format of
        `somestring.somestring2` it replaces the server letter with the first
        letter of somestring2

        In case the function was initialized with both first and last name,
        this overrides the extraction from email by the first letter of the
        first and last name. We add special logic to that functionality: in
        case the full name is compound, like Guido Von Rossum, we use the
        last part of the last name (Von Rossum), picking `R`.

        The function also normalizes non-ascii characters to their ascii
        representation, eg Ą => A
        """
        import unicodedata
        # replace non-ascii to ascii
        first_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
        last_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')

        # do NFKD encoding, and also make sure email has proper format
        email_address = self.normalize_email(self.email_address)

        # first push the email initials
        prefix, server = email_address.split('@', 1)

        # check if prefix is maybe a 'first_name.last_name' syntax
        _dot_split = prefix.rsplit('.', 1)
        if len(_dot_split) == 2 and _dot_split[1]:
            initials = [_dot_split[0][0], _dot_split[1][0]]
        else:
            initials = [prefix[0], server[0]]

        # then try to replace either first_name or last_name
        fn_letter = (first_name or " ")[0].strip()
        ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()

        if fn_letter:
            initials[0] = fn_letter

        if ln_letter:
            initials[1] = ln_letter

        return ''.join(initials).upper()

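    # A sketch of the initials logic above (addresses and names invented):
    #
    #   'john@doe.com'                -> 'JD'  (prefix letter + server letter)
    #   'john.smith@example.com'      -> 'JS'  (dotted prefix wins)
    #   first_name='Guido', last_name='Von Rossum', any email
    #                                 -> 'GR'  (last part of compound surname)
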
    def get_img_data_by_type(self, font_family, img_type):
        default_user = """
        <svg xmlns="http://www.w3.org/2000/svg"
        version="1.1" x="0px" y="0px" width="{size}" height="{size}"
        viewBox="-15 -10 439.165 429.164"

        xml:space="preserve"
        style="background:{background};" >

        <path d="M204.583,216.671c50.664,0,91.74-48.075,
        91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
        c-50.668,0-91.74,25.14-91.74,107.377C112.844,
        168.596,153.916,216.671,
        204.583,216.671z" fill="{text_color}"/>
        <path d="M407.164,374.717L360.88,
        270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
        c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
        15.366-44.203,23.488-69.076,23.488c-24.877,
        0-48.762-8.122-69.078-23.488
        c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
        259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
        c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
        6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
        19.402-10.527 C409.699,390.129,
        410.355,381.902,407.164,374.717z" fill="{text_color}"/>
        </svg>""".format(
            size=self.size,
            background='#979797',  # @grey4
            text_color=self.text_color,
            font_family=font_family)

        return {
            "default_user": default_user
        }[img_type]

    def get_img_data(self, svg_type=None):
        """
        generates the svg metadata for image
        """

        font_family = ','.join([
            'proximanovaregular',
            'Proxima Nova Regular',
            'Proxima Nova',
            'Arial',
            'Lucida Grande',
            'sans-serif'
        ])
        if svg_type:
            return self.get_img_data_by_type(font_family, svg_type)

        initials = self.get_initials()
        img_data = """
        <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
             width="{size}" height="{size}"
             style="width: 100%; height: 100%; background-color: {background}"
             viewBox="0 0 {size} {size}">
            <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
                pointer-events="auto" fill="{text_color}"
                font-family="{font_family}"
                style="font-weight: 400; font-size: {f_size}px;">{text}
            </text>
        </svg>""".format(
            size=self.size,
            f_size=self.size/1.85,  # scale the text inside the box nicely
            background=self.background,
            text_color=self.text_color,
            text=initials.upper(),
            font_family=font_family)

        return img_data

    def generate_svg(self, svg_type=None):
        img_data = self.get_img_data(svg_type)
        return "data:image/svg+xml;base64,%s" % img_data.encode('base64')


def initials_gravatar(email_address, first_name, last_name, size=30):
    svg_type = None
    if email_address == User.DEFAULT_USER_EMAIL:
        svg_type = 'default_user'
    klass = InitialsGravatar(email_address, first_name, last_name, size)
    return klass.generate_svg(svg_type=svg_type)


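# A brief sketch of the flow above (the address is invented): calling
# ``initials_gravatar('jane.doe@example.com', 'Jane', 'Doe')`` derives the
# initials ('JD'), picks a stable background colour from the md5 of the
# address, renders a small inline SVG and returns it as a
# 'data:image/svg+xml;base64,...' URI usable directly as an <img> src.

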
def gravatar_url(email_address, size=30, request=None):
    request = get_current_request()
    _use_gravatar = request.call_context.visual.use_gravatar
    _gravatar_url = request.call_context.visual.gravatar_url

    _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL

    email_address = email_address or User.DEFAULT_USER_EMAIL
    if isinstance(email_address, unicode):
        # hashlib crashes on unicode items
        email_address = safe_str(email_address)

    # empty email or default user
    if not email_address or email_address == User.DEFAULT_USER_EMAIL:
        return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)

    if _use_gravatar:
        # TODO: Disuse pyramid thread locals. Think about another solution to
        # get the host and schema here.
        request = get_current_request()
        tmpl = safe_str(_gravatar_url)
        tmpl = tmpl.replace('{email}', email_address)\
            .replace('{md5email}', md5_safe(email_address.lower())) \
            .replace('{netloc}', request.host)\
            .replace('{scheme}', request.scheme)\
            .replace('{size}', safe_str(size))
        return tmpl
    else:
        return initials_gravatar(email_address, '', '', size=size)


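# An illustration of the template expansion above (the template and address
# are examples, not necessarily the shipped defaults): a gravatar URL
# template such as
#   'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
# gets '{md5email}' replaced with the md5 of the lower-cased address and
# '{size}' with the requested pixel size, producing e.g.
#   'https://secure.gravatar.com/avatar/<md5-of-email>?d=identicon&s=30'

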
class Page(_Page):
    """
    Custom pager to match rendering style with paginator
    """

    def _get_pos(self, cur_page, max_page, items):
        edge = (items / 2) + 1
        if (cur_page <= edge):
            radius = max(items / 2, items - cur_page)
        elif (max_page - cur_page) < edge:
            radius = (items - 1) - (max_page - cur_page)
        else:
            radius = items / 2

        left = max(1, (cur_page - (radius)))
        right = min(max_page, cur_page + (radius))
        return left, cur_page, right

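    # A worked example for ``_get_pos`` (numbers chosen for illustration):
    # with cur_page=7, max_page=12 and items=5 (i.e. radius 2), edge is 3,
    # neither edge branch applies, so radius stays 5 / 2 = 2 and the window
    # becomes left=5, right=9, rendering as '1 .. 5 6 [7] 8 9 .. 12' in
    # ``_range`` below.
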
1346 def _range(self, regexp_match):
1346 def _range(self, regexp_match):
1347 """
1347 """
1348 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1348 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1349
1349
1350 Arguments:
1350 Arguments:
1351
1351
1352 regexp_match
1352 regexp_match
1353 A "re" (regular expressions) match object containing the
1353 A "re" (regular expressions) match object containing the
1354 radius of linked pages around the current page in
1354 radius of linked pages around the current page in
1355 regexp_match.group(1) as a string
1355 regexp_match.group(1) as a string
1356
1356
1357 This function is supposed to be called as a callable in
1357 This function is supposed to be called as a callable in
1358 re.sub.
1358 re.sub.
1359
1359
1360 """
1360 """
1361 radius = int(regexp_match.group(1))
1361 radius = int(regexp_match.group(1))
1362
1362
1363 # Compute the first and last page number within the radius
1363 # Compute the first and last page number within the radius
1364 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1364 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1365 # -> leftmost_page = 5
1365 # -> leftmost_page = 5
1366 # -> rightmost_page = 9
1366 # -> rightmost_page = 9
1367 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1367 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1368 self.last_page,
1368 self.last_page,
1369 (radius * 2) + 1)
1369 (radius * 2) + 1)
1370 nav_items = []
1370 nav_items = []
1371
1371
1372 # Create a link to the first page (unless we are on the first page
1372 # Create a link to the first page (unless we are on the first page
1373 # or there would be no need to insert '..' spacers)
1373 # or there would be no need to insert '..' spacers)
1374 if self.page != self.first_page and self.first_page < leftmost_page:
1374 if self.page != self.first_page and self.first_page < leftmost_page:
1375 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1375 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1376
1376
1377 # Insert dots if there are pages between the first page
1377 # Insert dots if there are pages between the first page
1378 # and the currently displayed page range
1378 # and the currently displayed page range
1379 if leftmost_page - self.first_page > 1:
1379 if leftmost_page - self.first_page > 1:
1380 # Wrap in a SPAN tag if nolink_attr is set
1380 # Wrap in a SPAN tag if nolink_attr is set
1381 text = '..'
1381 text = '..'
1382 if self.dotdot_attr:
1382 if self.dotdot_attr:
1383 text = HTML.span(c=text, **self.dotdot_attr)
1383 text = HTML.span(c=text, **self.dotdot_attr)
1384 nav_items.append(text)
1384 nav_items.append(text)
1385
1385
1386 for thispage in xrange(leftmost_page, rightmost_page + 1):
1386 for thispage in xrange(leftmost_page, rightmost_page + 1):
1387 # Hilight the current page number and do not use a link
1387 # Hilight the current page number and do not use a link
1388 if thispage == self.page:
1388 if thispage == self.page:
1389 text = '%s' % (thispage,)
1389 text = '%s' % (thispage,)
1390 # Wrap in a SPAN tag if nolink_attr is set
1390 # Wrap in a SPAN tag if nolink_attr is set
1391 if self.curpage_attr:
1391 if self.curpage_attr:
1392 text = HTML.span(c=text, **self.curpage_attr)
1392 text = HTML.span(c=text, **self.curpage_attr)
1393 nav_items.append(text)
1393 nav_items.append(text)
1394 # Otherwise create just a link to that page
1394 # Otherwise create just a link to that page
1395 else:
1395 else:
1396 text = '%s' % (thispage,)
1396 text = '%s' % (thispage,)
1397 nav_items.append(self._pagerlink(thispage, text))
1397 nav_items.append(self._pagerlink(thispage, text))
1398
1398
1399 # Insert dots if there are pages between the displayed
1399 # Insert dots if there are pages between the displayed
1400 # page numbers and the end of the page range
1400 # page numbers and the end of the page range
1401 if self.last_page - rightmost_page > 1:
1401 if self.last_page - rightmost_page > 1:
1402 text = '..'
1402 text = '..'
1403 # Wrap in a SPAN tag if nolink_attr is set
1403 # Wrap in a SPAN tag if nolink_attr is set
1404 if self.dotdot_attr:
1404 if self.dotdot_attr:
1405 text = HTML.span(c=text, **self.dotdot_attr)
1405 text = HTML.span(c=text, **self.dotdot_attr)
1406 nav_items.append(text)
1406 nav_items.append(text)
1407
1407
1408 # Create a link to the very last page (unless we are on the last
1408 # Create a link to the very last page (unless we are on the last
1409 # page or there would be no need to insert '..' spacers)
1409 # page or there would be no need to insert '..' spacers)
1410 if self.page != self.last_page and rightmost_page < self.last_page:
1410 if self.page != self.last_page and rightmost_page < self.last_page:
1411 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1411 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1412
1412
1413 ## prerender links
1413 ## prerender links
1414 #_page_link = url.current()
1414 #_page_link = url.current()
1415 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1415 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1416 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1416 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1417 return self.separator.join(nav_items)
1417 return self.separator.join(nav_items)
1418
1418
1419 def pager(self, format='~2~', page_param='page', partial_param='partial',
1419 def pager(self, format='~2~', page_param='page', partial_param='partial',
1420 show_if_single_page=False, separator=' ', onclick=None,
1420 show_if_single_page=False, separator=' ', onclick=None,
1421 symbol_first='<<', symbol_last='>>',
1421 symbol_first='<<', symbol_last='>>',
1422 symbol_previous='<', symbol_next='>',
1422 symbol_previous='<', symbol_next='>',
1423 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1423 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1424 curpage_attr={'class': 'pager_curpage'},
1424 curpage_attr={'class': 'pager_curpage'},
1425 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1425 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1426
1426
1427 self.curpage_attr = curpage_attr
1427 self.curpage_attr = curpage_attr
1428 self.separator = separator
1428 self.separator = separator
1429 self.pager_kwargs = kwargs
1429 self.pager_kwargs = kwargs
1430 self.page_param = page_param
1430 self.page_param = page_param
1431 self.partial_param = partial_param
1431 self.partial_param = partial_param
1432 self.onclick = onclick
1432 self.onclick = onclick
1433 self.link_attr = link_attr
1433 self.link_attr = link_attr
1434 self.dotdot_attr = dotdot_attr
1434 self.dotdot_attr = dotdot_attr
1435
1435
1436 # Don't show navigator if there is no more than one page
1436 # Don't show navigator if there is no more than one page
1437 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1437 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1438 return ''
1438 return ''
1439
1439
1440 from string import Template
1440 from string import Template
1441 # Replace ~...~ in token format by range of pages
1441 # Replace ~...~ in token format by range of pages
1442 result = re.sub(r'~(\d+)~', self._range, format)
1442 result = re.sub(r'~(\d+)~', self._range, format)
1443
1443
1444 # Interpolate '%' variables
1444 # Interpolate '%' variables
1445 result = Template(result).safe_substitute({
1445 result = Template(result).safe_substitute({
1446 'first_page': self.first_page,
1446 'first_page': self.first_page,
1447 'last_page': self.last_page,
1447 'last_page': self.last_page,
1448 'page': self.page,
1448 'page': self.page,
1449 'page_count': self.page_count,
1449 'page_count': self.page_count,
1450 'items_per_page': self.items_per_page,
1450 'items_per_page': self.items_per_page,
1451 'first_item': self.first_item,
1451 'first_item': self.first_item,
1452 'last_item': self.last_item,
1452 'last_item': self.last_item,
1453 'item_count': self.item_count,
1453 'item_count': self.item_count,
1454 'link_first': self.page > self.first_page and \
1454 'link_first': self.page > self.first_page and \
1455 self._pagerlink(self.first_page, symbol_first) or '',
1455 self._pagerlink(self.first_page, symbol_first) or '',
1456 'link_last': self.page < self.last_page and \
1456 'link_last': self.page < self.last_page and \
1457 self._pagerlink(self.last_page, symbol_last) or '',
1457 self._pagerlink(self.last_page, symbol_last) or '',
1458 'link_previous': self.previous_page and \
1458 'link_previous': self.previous_page and \
1459 self._pagerlink(self.previous_page, symbol_previous) \
1459 self._pagerlink(self.previous_page, symbol_previous) \
1460 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1460 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1461 'link_next': self.next_page and \
1461 'link_next': self.next_page and \
1462 self._pagerlink(self.next_page, symbol_next) \
1462 self._pagerlink(self.next_page, symbol_next) \
1463 or HTML.span(symbol_next, class_="pg-next disabled")
1463 or HTML.span(symbol_next, class_="pg-next disabled")
1464 })
1464 })
1465
1465
1466 return literal(result)
1466 return literal(result)
1467
1467
1468
1468
1469 #==============================================================================
1469 #==============================================================================
1470 # REPO PAGER, PAGER FOR REPOSITORY
1470 # REPO PAGER, PAGER FOR REPOSITORY
1471 #==============================================================================
1471 #==============================================================================
1472 class RepoPage(Page):
1472 class RepoPage(Page):
1473
1473
1474 def __init__(self, collection, page=1, items_per_page=20,
1474 def __init__(self, collection, page=1, items_per_page=20,
1475 item_count=None, url=None, **kwargs):
1475 item_count=None, url=None, **kwargs):
1476
1476
1477 """Create a "RepoPage" instance. special pager for paging
1477 """Create a "RepoPage" instance. special pager for paging
1478 repository
1478 repository
1479 """
1479 """
1480 self._url_generator = url
1480 self._url_generator = url
1481
1481
1482 # Save the kwargs class-wide so they can be used in the pager() method
1482 # Save the kwargs class-wide so they can be used in the pager() method
1483 self.kwargs = kwargs
1483 self.kwargs = kwargs
1484
1484
1485 # Save a reference to the collection
1485 # Save a reference to the collection
1486 self.original_collection = collection
1486 self.original_collection = collection
1487
1487
1488 self.collection = collection
1488 self.collection = collection
1489
1489
1490 # The self.page is the number of the current page.
1490 # The self.page is the number of the current page.
1491 # The first page has the number 1!
1491 # The first page has the number 1!
1492 try:
1492 try:
1493 self.page = int(page) # make it int() if we get it as a string
1493 self.page = int(page) # make it int() if we get it as a string
1494 except (ValueError, TypeError):
1494 except (ValueError, TypeError):
1495 self.page = 1
1495 self.page = 1
1496
1496
1497 self.items_per_page = items_per_page
1497 self.items_per_page = items_per_page
1498
1498
1499 # Unless the user tells us how many items the collection has
1499 # Unless the user tells us how many items the collection has
1500 # we calculate that ourselves.
1500 # we calculate that ourselves.
1501 if item_count is not None:
1501 if item_count is not None:
1502 self.item_count = item_count
1502 self.item_count = item_count
1503 else:
1503 else:
1504 self.item_count = len(self.collection)
1504 self.item_count = len(self.collection)
1505
1505
1506 # Compute the number of the first and last available page
1506 # Compute the number of the first and last available page
1507 if self.item_count > 0:
1507 if self.item_count > 0:
1508 self.first_page = 1
1508 self.first_page = 1
1509 self.page_count = int(math.ceil(float(self.item_count) /
1509 self.page_count = int(math.ceil(float(self.item_count) /
1510 self.items_per_page))
1510 self.items_per_page))
1511 self.last_page = self.first_page + self.page_count - 1
1511 self.last_page = self.first_page + self.page_count - 1
1512
1512
1513 # Make sure that the requested page number is in the range of
1513 # Make sure that the requested page number is in the range of
1514 # valid pages
1514 # valid pages
1515 if self.page > self.last_page:
1515 if self.page > self.last_page:
1516 self.page = self.last_page
1516 self.page = self.last_page
1517 elif self.page < self.first_page:
1517 elif self.page < self.first_page:
1518 self.page = self.first_page
1518 self.page = self.first_page
1519
1519
1520 # Note: the number of items on this page can be less than
1520 # Note: the number of items on this page can be less than
1521 # items_per_page if the last page is not full
1521 # items_per_page if the last page is not full
1522 self.first_item = max(0, (self.item_count) - (self.page *
1522 self.first_item = max(0, (self.item_count) - (self.page *
1523 items_per_page))
1523 items_per_page))
1524 self.last_item = ((self.item_count - 1) - items_per_page *
1524 self.last_item = ((self.item_count - 1) - items_per_page *
1525 (self.page - 1))
1525 (self.page - 1))
1526
1526
1527 self.items = list(self.collection[self.first_item:self.last_item + 1])
1527 self.items = list(self.collection[self.first_item:self.last_item + 1])
1528
1528
1529 # Links to previous and next page
1529 # Links to previous and next page
1530 if self.page > self.first_page:
1530 if self.page > self.first_page:
1531 self.previous_page = self.page - 1
1531 self.previous_page = self.page - 1
1532 else:
1532 else:
1533 self.previous_page = None
1533 self.previous_page = None
1534
1534
1535 if self.page < self.last_page:
1535 if self.page < self.last_page:
1536 self.next_page = self.page + 1
1536 self.next_page = self.page + 1
1537 else:
1537 else:
1538 self.next_page = None
1538 self.next_page = None
1539
1539
1540 # No items available
1540 # No items available
1541 else:
1541 else:
1542 self.first_page = None
1542 self.first_page = None
1543 self.page_count = 0
1543 self.page_count = 0
1544 self.last_page = None
1544 self.last_page = None
1545 self.first_item = None
1545 self.first_item = None
1546 self.last_item = None
1546 self.last_item = None
1547 self.previous_page = None
1547 self.previous_page = None
1548 self.next_page = None
1548 self.next_page = None
1549 self.items = []
1549 self.items = []
1550
1550
1551 # This is a subclass of the 'list' type. Initialise the list now.
1551 # This is a subclass of the 'list' type. Initialise the list now.
1552 list.__init__(self, reversed(self.items))
1552 list.__init__(self, reversed(self.items))
1553
1553
1554
1554
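# Usage sketch (editor's illustration, not part of this changeset): RepoPage
# slices its collection from the end and reverses the slice, so page 1 shows
# the most recent items first.  The same arithmetic as in __init__, replayed
# on plain numbers:
item_count, items_per_page, page = 45, 20, 1
first_item = max(0, item_count - page * items_per_page)     # 25
last_item = (item_count - 1) - items_per_page * (page - 1)  # 44
collection = list(range(item_count))                        # stand-in items 0..44
page_items = list(reversed(collection[first_item:last_item + 1]))
print(page_items[:3])  # [44, 43, 42]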
1555 def breadcrumb_repo_link(repo):
1555 def breadcrumb_repo_link(repo):
1556 """
1556 """
1557 Makes a breadcrumb path of links to a repository
1557 Makes a breadcrumb path of links to a repository
1558
1558
1559 ex::
1559 ex::
1560 group >> subgroup >> repo
1560 group >> subgroup >> repo
1561
1561
1562 :param repo: a Repository instance
1562 :param repo: a Repository instance
1563 """
1563 """
1564
1564
1565 path = [
1565 path = [
1566 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1566 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1567 for group in repo.groups_with_parents
1567 for group in repo.groups_with_parents
1568 ] + [
1568 ] + [
1569 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1569 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1570 ]
1570 ]
1571
1571
1572 return literal(' &raquo; '.join(path))
1572 return literal(' &raquo; '.join(path))
1573
1573
1574
1574
1575 def format_byte_size_binary(file_size):
1575 def format_byte_size_binary(file_size):
1576 """
1576 """
1577 Formats file/folder sizes using binary (base-2) units.
1577 Formats file/folder sizes using binary (base-2) units.
1578 """
1578 """
1579 if file_size is None:
1579 if file_size is None:
1580 file_size = 0
1580 file_size = 0
1581
1581
1582 formatted_size = format_byte_size(file_size, binary=True)
1582 formatted_size = format_byte_size(file_size, binary=True)
1583 return formatted_size
1583 return formatted_size
1584
1584
1585
1585
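# Usage sketch (editor's illustration, not part of this changeset): with
# binary=True the helper reports sizes in base-2 units, so a call such as
# format_byte_size_binary(10 * 1024) would yield something like '10.0 KiB'
# (the exact string depends on the underlying format_byte_size implementation,
# which is not shown here).  Passing None is treated as a size of 0.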
1586 def urlify_text(text_, safe=True):
1586 def urlify_text(text_, safe=True):
1587 """
1587 """
1588 Extract URLs from the text and turn them into HTML links
1588 Extract URLs from the text and turn them into HTML links
1589
1589
1590 :param text_:
1590 :param text_:
1591 """
1591 """
1592
1592
1593 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1593 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1594 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1594 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1595
1595
1596 def url_func(match_obj):
1596 def url_func(match_obj):
1597 url_full = match_obj.groups()[0]
1597 url_full = match_obj.groups()[0]
1598 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1598 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1599 _newtext = url_pat.sub(url_func, text_)
1599 _newtext = url_pat.sub(url_func, text_)
1600 if safe:
1600 if safe:
1601 return literal(_newtext)
1601 return literal(_newtext)
1602 return _newtext
1602 return _newtext
1603
1603
1604
1604
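# Usage sketch (editor's illustration, not part of this changeset): the same
# URL pattern applied outside the helper, producing plain (non-literal) HTML:
import re

url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
                     r'''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
text = 'docs live at https://example.com/guide now'
html = url_pat.sub(lambda m: '<a href="%(url)s">%(url)s</a>' % {'url': m.group(1)}, text)
print(html)
# docs live at <a href="https://example.com/guide">https://example.com/guide</a> now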
1605 def urlify_commits(text_, repository):
1605 def urlify_commits(text_, repository):
1606 """
1606 """
1607 Extract commit ids from the text and turn them into links
1607 Extract commit ids from the text and turn them into links
1608
1608
1609 :param text_:
1609 :param text_:
1610 :param repository: repo name to build the URL with
1610 :param repository: repo name to build the URL with
1611 """
1611 """
1612
1612
1613 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1613 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1614
1614
1615 def url_func(match_obj):
1615 def url_func(match_obj):
1616 commit_id = match_obj.groups()[1]
1616 commit_id = match_obj.groups()[1]
1617 pref = match_obj.groups()[0]
1617 pref = match_obj.groups()[0]
1618 suf = match_obj.groups()[2]
1618 suf = match_obj.groups()[2]
1619
1619
1620 tmpl = (
1620 tmpl = (
1621 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1621 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1622 '%(commit_id)s</a>%(suf)s'
1622 '%(commit_id)s</a>%(suf)s'
1623 )
1623 )
1624 return tmpl % {
1624 return tmpl % {
1625 'pref': pref,
1625 'pref': pref,
1626 'cls': 'revision-link',
1626 'cls': 'revision-link',
1627 'url': route_url('repo_commit', repo_name=repository,
1627 'url': route_url('repo_commit', repo_name=repository,
1628 commit_id=commit_id),
1628 commit_id=commit_id),
1629 'commit_id': commit_id,
1629 'commit_id': commit_id,
1630 'suf': suf
1630 'suf': suf
1631 }
1631 }
1632
1632
1633 newtext = URL_PAT.sub(url_func, text_)
1633 newtext = URL_PAT.sub(url_func, text_)
1634
1634
1635 return newtext
1635 return newtext
1636
1636
1637
1637
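# Usage sketch (editor's illustration, not part of this changeset): the commit
# id pattern only matches bare runs of 12-40 hex characters delimited by
# whitespace or the string boundaries, so ordinary words stay untouched:
import re

URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
print(bool(URL_PAT.search('fixes regression from deadbeefcafe1234 in the pager')))  # True
print(bool(URL_PAT.search('bump version to release-1.2')))                          # False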
1638 def _process_url_func(match_obj, repo_name, uid, entry,
1638 def _process_url_func(match_obj, repo_name, uid, entry,
1639 return_raw_data=False, link_format='html'):
1639 return_raw_data=False, link_format='html'):
1640 pref = ''
1640 pref = ''
1641 if match_obj.group().startswith(' '):
1641 if match_obj.group().startswith(' '):
1642 pref = ' '
1642 pref = ' '
1643
1643
1644 issue_id = ''.join(match_obj.groups())
1644 issue_id = ''.join(match_obj.groups())
1645
1645
1646 if link_format == 'html':
1646 if link_format == 'html':
1647 tmpl = (
1647 tmpl = (
1648 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1648 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1649 '%(issue-prefix)s%(id-repr)s'
1649 '%(issue-prefix)s%(id-repr)s'
1650 '</a>')
1650 '</a>')
1651 elif link_format == 'rst':
1651 elif link_format == 'rst':
1652 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1652 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1653 elif link_format == 'markdown':
1653 elif link_format == 'markdown':
1654 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1654 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1655 else:
1655 else:
1656 raise ValueError('Bad link_format:{}'.format(link_format))
1656 raise ValueError('Bad link_format:{}'.format(link_format))
1657
1657
1658 (repo_name_cleaned,
1658 (repo_name_cleaned,
1659 parent_group_name) = RepoGroupModel().\
1659 parent_group_name) = RepoGroupModel().\
1660 _get_group_name_and_parent(repo_name)
1660 _get_group_name_and_parent(repo_name)
1661
1661
1662 # variables replacement
1662 # variables replacement
1663 named_vars = {
1663 named_vars = {
1664 'id': issue_id,
1664 'id': issue_id,
1665 'repo': repo_name,
1665 'repo': repo_name,
1666 'repo_name': repo_name_cleaned,
1666 'repo_name': repo_name_cleaned,
1667 'group_name': parent_group_name
1667 'group_name': parent_group_name
1668 }
1668 }
1669 # named regex variables
1669 # named regex variables
1670 named_vars.update(match_obj.groupdict())
1670 named_vars.update(match_obj.groupdict())
1671 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1671 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1672
1672
1673 data = {
1673 data = {
1674 'pref': pref,
1674 'pref': pref,
1675 'cls': 'issue-tracker-link',
1675 'cls': 'issue-tracker-link',
1676 'url': _url,
1676 'url': _url,
1677 'id-repr': issue_id,
1677 'id-repr': issue_id,
1678 'issue-prefix': entry['pref'],
1678 'issue-prefix': entry['pref'],
1679 'serv': entry['url'],
1679 'serv': entry['url'],
1680 }
1680 }
1681 if return_raw_data:
1681 if return_raw_data:
1682 return {
1682 return {
1683 'id': issue_id,
1683 'id': issue_id,
1684 'url': _url
1684 'url': _url
1685 }
1685 }
1686 return tmpl % data
1686 return tmpl % data
1687
1687
1688
1688
1689 def get_active_pattern_entries(repo_name):
1689 def get_active_pattern_entries(repo_name):
1690 repo = None
1690 repo = None
1691 if repo_name:
1691 if repo_name:
1692 # Look the repository up here so an invalid repo_name does not explode in
1692 # Look the repository up here so an invalid repo_name does not explode in
1693 # IssueTrackerSettingsModel; the (possibly invalid) name is still passed further down
1693 # IssueTrackerSettingsModel; the (possibly invalid) name is still passed further down
1694 repo = Repository.get_by_repo_name(repo_name, cache=True)
1694 repo = Repository.get_by_repo_name(repo_name, cache=True)
1695
1695
1696 settings_model = IssueTrackerSettingsModel(repo=repo)
1696 settings_model = IssueTrackerSettingsModel(repo=repo)
1697 active_entries = settings_model.get_settings(cache=True)
1697 active_entries = settings_model.get_settings(cache=True)
1698 return active_entries
1698 return active_entries
1699
1699
1700
1700
1701 def process_patterns(text_string, repo_name, link_format='html',
1701 def process_patterns(text_string, repo_name, link_format='html',
1702 active_entries=None):
1702 active_entries=None):
1703
1703
1704 allowed_formats = ['html', 'rst', 'markdown']
1704 allowed_formats = ['html', 'rst', 'markdown']
1705 if link_format not in allowed_formats:
1705 if link_format not in allowed_formats:
1706 raise ValueError('Link format can be only one of:{} got {}'.format(
1706 raise ValueError('Link format can be only one of:{} got {}'.format(
1707 allowed_formats, link_format))
1707 allowed_formats, link_format))
1708
1708
1709 active_entries = active_entries or get_active_pattern_entries(repo_name)
1709 active_entries = active_entries or get_active_pattern_entries(repo_name)
1710 issues_data = []
1710 issues_data = []
1711 newtext = text_string
1711 newtext = text_string
1712
1712
1713 for uid, entry in active_entries.items():
1713 for uid, entry in active_entries.items():
1714 log.debug('found issue tracker entry with uid %s' % (uid,))
1714 log.debug('found issue tracker entry with uid %s' % (uid,))
1715
1715
1716 if not (entry['pat'] and entry['url']):
1716 if not (entry['pat'] and entry['url']):
1717 log.debug('skipping due to missing data')
1717 log.debug('skipping due to missing data')
1718 continue
1718 continue
1719
1719
1720 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1720 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1721 % (uid, entry['pat'], entry['url'], entry['pref']))
1721 % (uid, entry['pat'], entry['url'], entry['pref']))
1722
1722
1723 try:
1723 try:
1724 pattern = re.compile(r'%s' % entry['pat'])
1724 pattern = re.compile(r'%s' % entry['pat'])
1725 except re.error:
1725 except re.error:
1726 log.exception(
1726 log.exception(
1727 'issue tracker pattern: `%s` failed to compile',
1727 'issue tracker pattern: `%s` failed to compile',
1728 entry['pat'])
1728 entry['pat'])
1729 continue
1729 continue
1730
1730
1731 data_func = partial(
1731 data_func = partial(
1732 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1732 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1733 return_raw_data=True)
1733 return_raw_data=True)
1734
1734
1735 for match_obj in pattern.finditer(text_string):
1735 for match_obj in pattern.finditer(text_string):
1736 issues_data.append(data_func(match_obj))
1736 issues_data.append(data_func(match_obj))
1737
1737
1738 url_func = partial(
1738 url_func = partial(
1739 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1739 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1740 link_format=link_format)
1740 link_format=link_format)
1741
1741
1742 newtext = pattern.sub(url_func, newtext)
1742 newtext = pattern.sub(url_func, newtext)
1743 log.debug('processed prefix:uid `%s`' % (uid,))
1743 log.debug('processed prefix:uid `%s`' % (uid,))
1744
1744
1745 return newtext, issues_data
1745 return newtext, issues_data
1746
1746
1747
1747
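# Usage sketch (editor's illustration, not part of this changeset): the shape
# of a single issue-tracker entry as consumed above, and the URL substitution
# it drives.  The pattern and URL below are hypothetical; real entries come
# from IssueTrackerSettingsModel.get_settings().
import re
import string

entry = {
    'pat': r'#(?P<issue_id>\d+)',                            # hypothetical pattern
    'url': 'https://tracker.example.com/issue/${issue_id}',  # hypothetical URL template
    'pref': '#',
}
match = re.compile(entry['pat']).search('closes #42')
named_vars = {'id': ''.join(match.groups())}
named_vars.update(match.groupdict())
print(string.Template(entry['url']).safe_substitute(**named_vars))
# https://tracker.example.com/issue/42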
1748 def urlify_commit_message(commit_text, repository=None,
1748 def urlify_commit_message(commit_text, repository=None,
1749 active_pattern_entries=None):
1749 active_pattern_entries=None):
1750 """
1750 """
1751 Parses the given commit message and builds proper links:
1751 Parses the given commit message and builds proper links:
1752 issue references are linked to the configured issue tracker, and commit ids become commit links
1752 issue references are linked to the configured issue tracker, and commit ids become commit links
1753
1753
1754 :param commit_text:
1754 :param commit_text:
1755 :param repository:
1755 :param repository:
1756 """
1756 """
1757 def escaper(string):
1757 def escaper(string):
1758 return string.replace('<', '&lt;').replace('>', '&gt;')
1758 return string.replace('<', '&lt;').replace('>', '&gt;')
1759
1759
1760 newtext = escaper(commit_text)
1760 newtext = escaper(commit_text)
1761
1761
1762 # extract http/https links and make them real urls
1762 # extract http/https links and make them real urls
1763 newtext = urlify_text(newtext, safe=False)
1763 newtext = urlify_text(newtext, safe=False)
1764
1764
1765 # urlify commits - extract commit ids and make link out of them, if we have
1765 # urlify commits - extract commit ids and make link out of them, if we have
1766 # the scope of repository present.
1766 # the scope of repository present.
1767 if repository:
1767 if repository:
1768 newtext = urlify_commits(newtext, repository)
1768 newtext = urlify_commits(newtext, repository)
1769
1769
1770 # process issue tracker patterns
1770 # process issue tracker patterns
1771 newtext, issues = process_patterns(newtext, repository or '',
1771 newtext, issues = process_patterns(newtext, repository or '',
1772 active_entries=active_pattern_entries)
1772 active_entries=active_pattern_entries)
1773
1773
1774 return literal(newtext)
1774 return literal(newtext)
1775
1775
1776
1776
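# Usage sketch (editor's illustration, not part of this changeset): the
# escaping step runs before any linkification, so markup typed into a commit
# message cannot leak into the generated anchors:
def _escape_demo(s):
    return s.replace('<', '&lt;').replace('>', '&gt;')

print(_escape_demo('merge <feature> into default'))  # merge &lt;feature&gt; into default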
1777 def render_binary(repo_name, file_obj):
1777 def render_binary(repo_name, file_obj):
1778 """
1778 """
1779 Choose how to render a binary file
1779 Choose how to render a binary file
1780 """
1780 """
1781 filename = file_obj.name
1781 filename = file_obj.name
1782
1782
1783 # images
1783 # images
1784 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1784 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1785 if fnmatch.fnmatch(filename, pat=ext):
1785 if fnmatch.fnmatch(filename, pat=ext):
1786 alt = filename
1786 alt = filename
1787 src = route_path(
1787 src = route_path(
1788 'repo_file_raw', repo_name=repo_name,
1788 'repo_file_raw', repo_name=repo_name,
1789 commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
1789 commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
1790 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1790 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1791
1791
1792
1792
1793 def renderer_from_filename(filename, exclude=None):
1793 def renderer_from_filename(filename, exclude=None):
1794 """
1794 """
1795 Choose a renderer based on the filename; this works only for text-based files.
1795 Choose a renderer based on the filename; this works only for text-based files.
1796 """
1796 """
1797
1797
1798 # ipython
1798 # ipython
1799 for ext in ['*.ipynb']:
1799 for ext in ['*.ipynb']:
1800 if fnmatch.fnmatch(filename, pat=ext):
1800 if fnmatch.fnmatch(filename, pat=ext):
1801 return 'jupyter'
1801 return 'jupyter'
1802
1802
1803 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1803 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1804 if is_markup:
1804 if is_markup:
1805 return is_markup
1805 return is_markup
1806 return None
1806 return None
1807
1807
1808
1808
1809 def render(source, renderer='rst', mentions=False, relative_urls=None,
1809 def render(source, renderer='rst', mentions=False, relative_urls=None,
1810 repo_name=None):
1810 repo_name=None):
1811
1811
1812 def maybe_convert_relative_links(html_source):
1812 def maybe_convert_relative_links(html_source):
1813 if relative_urls:
1813 if relative_urls:
1814 return relative_links(html_source, relative_urls)
1814 return relative_links(html_source, relative_urls)
1815 return html_source
1815 return html_source
1816
1816
1817 if renderer == 'rst':
1817 if renderer == 'plain':
1818 return literal(
1819 MarkupRenderer.plain(source, leading_newline=False))
1820
1821 elif renderer == 'rst':
1818 if repo_name:
1822 if repo_name:
1819 # process patterns on comments if we pass in repo name
1823 # process patterns on comments if we pass in repo name
1820 source, issues = process_patterns(
1824 source, issues = process_patterns(
1821 source, repo_name, link_format='rst')
1825 source, repo_name, link_format='rst')
1822
1826
1823 return literal(
1827 return literal(
1824 '<div class="rst-block">%s</div>' %
1828 '<div class="rst-block">%s</div>' %
1825 maybe_convert_relative_links(
1829 maybe_convert_relative_links(
1826 MarkupRenderer.rst(source, mentions=mentions)))
1830 MarkupRenderer.rst(source, mentions=mentions)))
1831
1827 elif renderer == 'markdown':
1832 elif renderer == 'markdown':
1828 if repo_name:
1833 if repo_name:
1829 # process patterns on comments if we pass in repo name
1834 # process patterns on comments if we pass in repo name
1830 source, issues = process_patterns(
1835 source, issues = process_patterns(
1831 source, repo_name, link_format='markdown')
1836 source, repo_name, link_format='markdown')
1832
1837
1833 return literal(
1838 return literal(
1834 '<div class="markdown-block">%s</div>' %
1839 '<div class="markdown-block">%s</div>' %
1835 maybe_convert_relative_links(
1840 maybe_convert_relative_links(
1836 MarkupRenderer.markdown(source, flavored=True,
1841 MarkupRenderer.markdown(source, flavored=True,
1837 mentions=mentions)))
1842 mentions=mentions)))
1843
1838 elif renderer == 'jupyter':
1844 elif renderer == 'jupyter':
1839 return literal(
1845 return literal(
1840 '<div class="ipynb">%s</div>' %
1846 '<div class="ipynb">%s</div>' %
1841 maybe_convert_relative_links(
1847 maybe_convert_relative_links(
1842 MarkupRenderer.jupyter(source)))
1848 MarkupRenderer.jupyter(source)))
1843
1849
1844 # None means just show the file-source
1850 # None means just show the file-source
1845 return None
1851 return None
1846
1852
1847
1853
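# Usage sketch (editor's illustration, not part of this changeset): dispatch
# on the renderer name as implemented above.  These calls assume the running
# application context required by MarkupRenderer and process_patterns:
#
#   render('# Title', renderer='markdown')   # <div class="markdown-block">...</div>
#   render('Title\n=====', renderer='rst')   # <div class="rst-block">...</div>
#   render('plain text', renderer='plain')   # MarkupRenderer.plain() output
#   render('anything', renderer='unknown')   # None -> caller shows the raw source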
1848 def commit_status(repo, commit_id):
1854 def commit_status(repo, commit_id):
1849 return ChangesetStatusModel().get_status(repo, commit_id)
1855 return ChangesetStatusModel().get_status(repo, commit_id)
1850
1856
1851
1857
1852 def commit_status_lbl(commit_status):
1858 def commit_status_lbl(commit_status):
1853 return dict(ChangesetStatus.STATUSES).get(commit_status)
1859 return dict(ChangesetStatus.STATUSES).get(commit_status)
1854
1860
1855
1861
1856 def commit_time(repo_name, commit_id):
1862 def commit_time(repo_name, commit_id):
1857 repo = Repository.get_by_repo_name(repo_name)
1863 repo = Repository.get_by_repo_name(repo_name)
1858 commit = repo.get_commit(commit_id=commit_id)
1864 commit = repo.get_commit(commit_id=commit_id)
1859 return commit.date
1865 return commit.date
1860
1866
1861
1867
1862 def get_permission_name(key):
1868 def get_permission_name(key):
1863 return dict(Permission.PERMS).get(key)
1869 return dict(Permission.PERMS).get(key)
1864
1870
1865
1871
1866 def journal_filter_help(request):
1872 def journal_filter_help(request):
1867 _ = request.translate
1873 _ = request.translate
1868 from rhodecode.lib.audit_logger import ACTIONS
1874 from rhodecode.lib.audit_logger import ACTIONS
1869 actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80))
1875 actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80))
1870
1876
1871 return _(
1877 return _(
1872 'Example filter terms:\n' +
1878 'Example filter terms:\n' +
1873 ' repository:vcs\n' +
1879 ' repository:vcs\n' +
1874 ' username:marcin\n' +
1880 ' username:marcin\n' +
1875 ' username:(NOT marcin)\n' +
1881 ' username:(NOT marcin)\n' +
1876 ' action:*push*\n' +
1882 ' action:*push*\n' +
1877 ' ip:127.0.0.1\n' +
1883 ' ip:127.0.0.1\n' +
1878 ' date:20120101\n' +
1884 ' date:20120101\n' +
1879 ' date:[20120101100000 TO 20120102]\n' +
1885 ' date:[20120101100000 TO 20120102]\n' +
1880 '\n' +
1886 '\n' +
1881 'Actions: {actions}\n' +
1887 'Actions: {actions}\n' +
1882 '\n' +
1888 '\n' +
1883 'Generate wildcards using \'*\' character:\n' +
1889 'Generate wildcards using \'*\' character:\n' +
1884 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1890 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1885 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1891 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1886 '\n' +
1892 '\n' +
1887 'Optional AND / OR operators in queries\n' +
1893 'Optional AND / OR operators in queries\n' +
1888 ' "repository:vcs OR repository:test"\n' +
1894 ' "repository:vcs OR repository:test"\n' +
1889 ' "username:test AND repository:test*"\n'
1895 ' "username:test AND repository:test*"\n'
1890 ).format(actions=actions)
1896 ).format(actions=actions)
1891
1897
1892
1898
1893 def search_filter_help(searcher, request):
1899 def search_filter_help(searcher, request):
1894 _ = request.translate
1900 _ = request.translate
1895
1901
1896 terms = ''
1902 terms = ''
1897 return _(
1903 return _(
1898 'Example filter terms for `{searcher}` search:\n' +
1904 'Example filter terms for `{searcher}` search:\n' +
1899 '{terms}\n' +
1905 '{terms}\n' +
1900 'Generate wildcards using \'*\' character:\n' +
1906 'Generate wildcards using \'*\' character:\n' +
1901 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1907 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1902 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1908 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1903 '\n' +
1909 '\n' +
1904 'Optional AND / OR operators in queries\n' +
1910 'Optional AND / OR operators in queries\n' +
1905 ' "repo_name:vcs OR repo_name:test"\n' +
1911 ' "repo_name:vcs OR repo_name:test"\n' +
1906 ' "owner:test AND repo_name:test*"\n' +
1912 ' "owner:test AND repo_name:test*"\n' +
1907 'More: {search_doc}'
1913 'More: {search_doc}'
1908 ).format(searcher=searcher.name,
1914 ).format(searcher=searcher.name,
1909 terms=terms, search_doc=searcher.query_lang_doc)
1915 terms=terms, search_doc=searcher.query_lang_doc)
1910
1916
1911
1917
1912 def not_mapped_error(repo_name):
1918 def not_mapped_error(repo_name):
1913 from rhodecode.translation import _
1919 from rhodecode.translation import _
1914 flash(_('%s repository is not mapped to db perhaps'
1920 flash(_('%s repository is not mapped to db perhaps'
1915 ' it was created or renamed from the filesystem'
1921 ' it was created or renamed from the filesystem'
1916 ' please run the application again'
1922 ' please run the application again'
1917 ' in order to rescan repositories') % repo_name, category='error')
1923 ' in order to rescan repositories') % repo_name, category='error')
1918
1924
1919
1925
1920 def ip_range(ip_addr):
1926 def ip_range(ip_addr):
1921 from rhodecode.model.db import UserIpMap
1927 from rhodecode.model.db import UserIpMap
1922 s, e = UserIpMap._get_ip_range(ip_addr)
1928 s, e = UserIpMap._get_ip_range(ip_addr)
1923 return '%s - %s' % (s, e)
1929 return '%s - %s' % (s, e)
1924
1930
1925
1931
1926 def form(url, method='post', needs_csrf_token=True, **attrs):
1932 def form(url, method='post', needs_csrf_token=True, **attrs):
1927 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1933 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1928 if method.lower() != 'get' and needs_csrf_token:
1934 if method.lower() != 'get' and needs_csrf_token:
1929 raise Exception(
1935 raise Exception(
1930 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1936 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1931 'CSRF token. If the endpoint does not require such token you can ' +
1937 'CSRF token. If the endpoint does not require such token you can ' +
1932 'explicitly set the parameter needs_csrf_token to false.')
1938 'explicitly set the parameter needs_csrf_token to false.')
1933
1939
1934 return wh_form(url, method=method, **attrs)
1940 return wh_form(url, method=method, **attrs)
1935
1941
1936
1942
1937 def secure_form(form_url, method="POST", multipart=False, **attrs):
1943 def secure_form(form_url, method="POST", multipart=False, **attrs):
1938 """Start a form tag that points the action to an url. This
1944 """Start a form tag that points the action to an url. This
1939 form tag will also include the hidden field containing
1945 form tag will also include the hidden field containing
1940 the auth token.
1946 the auth token.
1941
1947
1942 The url options should be given either as a string, or as a
1948 The url options should be given either as a string, or as a
1943 ``url()`` function. The method for the form defaults to POST.
1949 ``url()`` function. The method for the form defaults to POST.
1944
1950
1945 Options:
1951 Options:
1946
1952
1947 ``multipart``
1953 ``multipart``
1948 If set to True, the enctype is set to "multipart/form-data".
1954 If set to True, the enctype is set to "multipart/form-data".
1949 ``method``
1955 ``method``
1950 The method to use when submitting the form, usually either
1956 The method to use when submitting the form, usually either
1951 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1957 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1952 hidden input with name _method is added to simulate the verb
1958 hidden input with name _method is added to simulate the verb
1953 over POST.
1959 over POST.
1954
1960
1955 """
1961 """
1956 from webhelpers.pylonslib.secure_form import insecure_form
1962 from webhelpers.pylonslib.secure_form import insecure_form
1957
1963
1958 if 'request' in attrs:
1964 if 'request' in attrs:
1959 session = attrs['request'].session
1965 session = attrs['request'].session
1960 del attrs['request']
1966 del attrs['request']
1961 else:
1967 else:
1962 raise ValueError(
1968 raise ValueError(
1963 'Calling this form requires request= to be passed as argument')
1969 'Calling this form requires request= to be passed as argument')
1964
1970
1965 form = insecure_form(form_url, method, multipart, **attrs)
1971 form = insecure_form(form_url, method, multipart, **attrs)
1966 token = literal(
1972 token = literal(
1967 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1973 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1968 csrf_token_key, csrf_token_key, get_csrf_token(session)))
1974 csrf_token_key, csrf_token_key, get_csrf_token(session)))
1969
1975
1970 return literal("%s\n%s" % (form, token))
1976 return literal("%s\n%s" % (form, token))
1971
1977
1972
1978
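# Usage sketch (editor's illustration, not part of this changeset): the hidden
# CSRF input appended after the opening form tag, rendered standalone with
# hypothetical values for the token key and session token:
csrf_token_key = 'csrf_token'   # assumed name; defined elsewhere in the library
session_token = 'SECRET-TOKEN'  # stand-in for get_csrf_token(session)
token = '<input type="hidden" id="{}" name="{}" value="{}">'.format(
    csrf_token_key, csrf_token_key, session_token)
print(token)
# <input type="hidden" id="csrf_token" name="csrf_token" value="SECRET-TOKEN">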
1973 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1979 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1974 select_html = select(name, selected, options, **attrs)
1980 select_html = select(name, selected, options, **attrs)
1975 select2 = """
1981 select2 = """
1976 <script>
1982 <script>
1977 $(document).ready(function() {
1983 $(document).ready(function() {
1978 $('#%s').select2({
1984 $('#%s').select2({
1979 containerCssClass: 'drop-menu',
1985 containerCssClass: 'drop-menu',
1980 dropdownCssClass: 'drop-menu-dropdown',
1986 dropdownCssClass: 'drop-menu-dropdown',
1981 dropdownAutoWidth: true%s
1987 dropdownAutoWidth: true%s
1982 });
1988 });
1983 });
1989 });
1984 </script>
1990 </script>
1985 """
1991 """
1986 filter_option = """,
1992 filter_option = """,
1987 minimumResultsForSearch: -1
1993 minimumResultsForSearch: -1
1988 """
1994 """
1989 input_id = attrs.get('id') or name
1995 input_id = attrs.get('id') or name
1990 filter_enabled = "" if enable_filter else filter_option
1996 filter_enabled = "" if enable_filter else filter_option
1991 select_script = literal(select2 % (input_id, filter_enabled))
1997 select_script = literal(select2 % (input_id, filter_enabled))
1992
1998
1993 return literal(select_html+select_script)
1999 return literal(select_html+select_script)
1994
2000
1995
2001
1996 def get_visual_attr(tmpl_context_var, attr_name):
2002 def get_visual_attr(tmpl_context_var, attr_name):
1997 """
2003 """
1998 A safe way to get an attribute from the `visual` variable of the template context
2004 A safe way to get an attribute from the `visual` variable of the template context
1999
2005
2000 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
2006 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
2001 :param attr_name: name of the attribute we fetch from the c.visual
2007 :param attr_name: name of the attribute we fetch from the c.visual
2002 """
2008 """
2003 visual = getattr(tmpl_context_var, 'visual', None)
2009 visual = getattr(tmpl_context_var, 'visual', None)
2004 if not visual:
2010 if not visual:
2005 return
2011 return
2006 else:
2012 else:
2007 return getattr(visual, attr_name, None)
2013 return getattr(visual, attr_name, None)
2008
2014
2009
2015
2010 def get_last_path_part(file_node):
2016 def get_last_path_part(file_node):
2011 if not file_node.path:
2017 if not file_node.path:
2012 return u''
2018 return u''
2013
2019
2014 path = safe_unicode(file_node.path.split('/')[-1])
2020 path = safe_unicode(file_node.path.split('/')[-1])
2015 return u'../' + path
2021 return u'../' + path
2016
2022
2017
2023
2018 def route_url(*args, **kwargs):
2024 def route_url(*args, **kwargs):
2019 """
2025 """
2020 Wrapper around pyramids `route_url` (fully qualified url) function.
2026 Wrapper around pyramids `route_url` (fully qualified url) function.
2021 """
2027 """
2022 req = get_current_request()
2028 req = get_current_request()
2023 return req.route_url(*args, **kwargs)
2029 return req.route_url(*args, **kwargs)
2024
2030
2025
2031
2026 def route_path(*args, **kwargs):
2032 def route_path(*args, **kwargs):
2027 """
2033 """
2028 Wrapper around pyramids `route_path` function.
2034 Wrapper around pyramids `route_path` function.
2029 """
2035 """
2030 req = get_current_request()
2036 req = get_current_request()
2031 return req.route_path(*args, **kwargs)
2037 return req.route_path(*args, **kwargs)
2032
2038
2033
2039
2034 def route_path_or_none(*args, **kwargs):
2040 def route_path_or_none(*args, **kwargs):
2035 try:
2041 try:
2036 return route_path(*args, **kwargs)
2042 return route_path(*args, **kwargs)
2037 except KeyError:
2043 except KeyError:
2038 return None
2044 return None
2039
2045
2040
2046
2041 def current_route_path(request, **kw):
2047 def current_route_path(request, **kw):
2042 new_args = request.GET.mixed()
2048 new_args = request.GET.mixed()
2043 new_args.update(kw)
2049 new_args.update(kw)
2044 return request.current_route_path(_query=new_args)
2050 return request.current_route_path(_query=new_args)
2045
2051
2046
2052
2047 def api_call_example(method, args):
2053 def api_call_example(method, args):
2048 """
2054 """
2049 Generates an API call example via CURL
2055 Generates an API call example via CURL
2050 """
2056 """
2051 args_json = json.dumps(OrderedDict([
2057 args_json = json.dumps(OrderedDict([
2052 ('id', 1),
2058 ('id', 1),
2053 ('auth_token', 'SECRET'),
2059 ('auth_token', 'SECRET'),
2054 ('method', method),
2060 ('method', method),
2055 ('args', args)
2061 ('args', args)
2056 ]))
2062 ]))
2057 return literal(
2063 return literal(
2058 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2064 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2059 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2065 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2060 "and needs to be of `api calls` role."
2066 "and needs to be of `api calls` role."
2061 .format(
2067 .format(
2062 api_url=route_url('apiv2'),
2068 api_url=route_url('apiv2'),
2063 token_url=route_url('my_account_auth_tokens'),
2069 token_url=route_url('my_account_auth_tokens'),
2064 data=args_json))
2070 data=args_json))
2065
2071
2066
2072
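# Usage sketch (editor's illustration, not part of this changeset): the JSON
# body embedded in the generated curl command, built the same way as above
# (method name and args here are hypothetical):
import json
from collections import OrderedDict

args_json = json.dumps(OrderedDict([
    ('id', 1),
    ('auth_token', 'SECRET'),
    ('method', 'get_repo'),
    ('args', {'repoid': 'example-repo'}),
]))
print(args_json)
# {"id": 1, "auth_token": "SECRET", "method": "get_repo", "args": {"repoid": "example-repo"}}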
2067 def notification_description(notification, request):
2073 def notification_description(notification, request):
2068 """
2074 """
2069 Generate a human-readable notification description based on the notification type
2075 Generate a human-readable notification description based on the notification type
2070 """
2076 """
2071 from rhodecode.model.notification import NotificationModel
2077 from rhodecode.model.notification import NotificationModel
2072 return NotificationModel().make_description(
2078 return NotificationModel().make_description(
2073 notification, translate=request.translate)
2079 notification, translate=request.translate)
2074
2080
2075
2081
2076 def go_import_header(request, db_repo=None):
2082 def go_import_header(request, db_repo=None):
2077 """
2083 """
2078 Creates a go-import meta header used by the Go toolchain (go get)
2084 Creates a go-import meta header used by the Go toolchain (go get)
2079 """
2085 """
2080
2086
2081 if not db_repo:
2087 if not db_repo:
2082 return
2088 return
2083 if 'go-get' not in request.GET:
2089 if 'go-get' not in request.GET:
2084 return
2090 return
2085
2091
2086 clone_url = db_repo.clone_url()
2092 clone_url = db_repo.clone_url()
2087 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2093 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2088 # we have a repo and go-get flag,
2094 # we have a repo and go-get flag,
2089 return literal('<meta name="go-import" content="{} {} {}">'.format(
2095 return literal('<meta name="go-import" content="{} {} {}">'.format(
2090 prefix, db_repo.repo_type, clone_url))
2096 prefix, db_repo.repo_type, clone_url))
2091
2097
2092
2098
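# Usage sketch (editor's illustration, not part of this changeset): for a
# request carrying ?go-get=1, the emitted meta tag has the form (values
# hypothetical):
#
#   <meta name="go-import" content="code.example.com/group/repo git https://code.example.com/group/repo">
#
# where the prefix is the clone URL with its http(s):// scheme stripped and
# the middle field is the repository type.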
2093 def reviewer_as_json(*args, **kwargs):
2099 def reviewer_as_json(*args, **kwargs):
2094 from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
2100 from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
2095 return _reviewer_as_json(*args, **kwargs)
2101 return _reviewer_as_json(*args, **kwargs)
@@ -1,515 +1,519 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Renderer for markup languages with ability to parse using rst or markdown
23 Renderer for markup languages with ability to parse using rst or markdown
24 """
24 """
25
25
26 import re
26 import re
27 import os
27 import os
28 import lxml
28 import lxml
29 import logging
29 import logging
30 import urlparse
30 import urlparse
31 import bleach
31 import bleach
32
32
33 from mako.lookup import TemplateLookup
33 from mako.lookup import TemplateLookup
34 from mako.template import Template as MakoTemplate
34 from mako.template import Template as MakoTemplate
35
35
36 from docutils.core import publish_parts
36 from docutils.core import publish_parts
37 from docutils.parsers.rst import directives
37 from docutils.parsers.rst import directives
38 from docutils import writers
38 from docutils import writers
39 from docutils.writers import html4css1
39 from docutils.writers import html4css1
40 import markdown
40 import markdown
41
41
42 from rhodecode.lib.markdown_ext import GithubFlavoredMarkdownExtension
42 from rhodecode.lib.markdown_ext import GithubFlavoredMarkdownExtension
43 from rhodecode.lib.utils2 import (
43 from rhodecode.lib.utils2 import (
44 safe_str, safe_unicode, md5_safe, MENTIONS_REGEX)
44 safe_str, safe_unicode, md5_safe, MENTIONS_REGEX)
45
45
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
48 # default renderer used to generate automated comments
48 # default renderer used to generate automated comments
49 DEFAULT_COMMENTS_RENDERER = 'rst'
49 DEFAULT_COMMENTS_RENDERER = 'rst'
50
50
51
51
52 class CustomHTMLTranslator(writers.html4css1.HTMLTranslator):
52 class CustomHTMLTranslator(writers.html4css1.HTMLTranslator):
53 """
53 """
54 Custom HTML Translator used for sandboxing potential
54 Custom HTML Translator used for sandboxing potential
55 JS injections in ref links
55 JS injections in ref links
56 """
56 """
57
57
58 def visit_reference(self, node):
58 def visit_reference(self, node):
59 if 'refuri' in node.attributes:
59 if 'refuri' in node.attributes:
60 refuri = node['refuri']
60 refuri = node['refuri']
61 if ':' in refuri:
61 if ':' in refuri:
62 prefix, link = refuri.lstrip().split(':', 1)
62 prefix, link = refuri.lstrip().split(':', 1)
63 if prefix == 'javascript':
63 if prefix == 'javascript':
64 # we don't allow javascript type of refs...
64 # we don't allow javascript type of refs...
65 node['refuri'] = 'javascript:alert("SandBoxedJavascript")'
65 node['refuri'] = 'javascript:alert("SandBoxedJavascript")'
66
66
67 # old style class requires this...
67 # old style class requires this...
68 return html4css1.HTMLTranslator.visit_reference(self, node)
68 return html4css1.HTMLTranslator.visit_reference(self, node)
69
69
70
70
71 class RhodeCodeWriter(writers.html4css1.Writer):
71 class RhodeCodeWriter(writers.html4css1.Writer):
72 def __init__(self):
72 def __init__(self):
73 writers.Writer.__init__(self)
73 writers.Writer.__init__(self)
74 self.translator_class = CustomHTMLTranslator
74 self.translator_class = CustomHTMLTranslator
75
75
76
76
77 def relative_links(html_source, server_paths):
77 def relative_links(html_source, server_paths):
78 if not html_source:
78 if not html_source:
79 return html_source
79 return html_source
80
80
81 try:
81 try:
82 from lxml.html import fromstring
82 from lxml.html import fromstring
83 from lxml.html import tostring
83 from lxml.html import tostring
84 except ImportError:
84 except ImportError:
85 log.exception('Failed to import lxml')
85 log.exception('Failed to import lxml')
86 return html_source
86 return html_source
87
87
88 try:
88 try:
89 doc = lxml.html.fromstring(html_source)
89 doc = lxml.html.fromstring(html_source)
90 except Exception:
90 except Exception:
91 return html_source
91 return html_source
92
92
93 for el in doc.cssselect('img, video'):
93 for el in doc.cssselect('img, video'):
94 src = el.attrib.get('src')
94 src = el.attrib.get('src')
95 if src:
95 if src:
96 el.attrib['src'] = relative_path(src, server_paths['raw'])
96 el.attrib['src'] = relative_path(src, server_paths['raw'])
97
97
98 for el in doc.cssselect('a:not(.gfm)'):
98 for el in doc.cssselect('a:not(.gfm)'):
99 src = el.attrib.get('href')
99 src = el.attrib.get('href')
100 if src:
100 if src:
101 raw_mode = el.attrib['href'].endswith('?raw=1')
101 raw_mode = el.attrib['href'].endswith('?raw=1')
102 if raw_mode:
102 if raw_mode:
103 el.attrib['href'] = relative_path(src, server_paths['raw'])
103 el.attrib['href'] = relative_path(src, server_paths['raw'])
104 else:
104 else:
105 el.attrib['href'] = relative_path(src, server_paths['standard'])
105 el.attrib['href'] = relative_path(src, server_paths['standard'])
106
106
107 return lxml.html.tostring(doc)
107 return lxml.html.tostring(doc)
108
108
109
109
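# Usage sketch (editor's illustration, not part of this changeset): the
# server_paths mapping is expected to provide a 'raw' and a 'standard' target,
# e.g. (paths hypothetical):
#
#   relative_links(html, {'raw': '/repo/raw/tip/README.md',
#                         'standard': '/repo/files/tip/README.md'})
#
# img/video sources and links ending in '?raw=1' are rewritten against the
# 'raw' path; all other non-.gfm links are rewritten against 'standard'.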
110 def relative_path(path, request_path, is_repo_file=None):
110 def relative_path(path, request_path, is_repo_file=None):
111 """
111 """
112 Relative link support: path is a relative path, and request_path is the
112 Relative link support: path is a relative path, and request_path is the
113 current server path (not absolute)
113 current server path (not absolute)
114
114
115 e.g.
115 e.g.
116
116
117 path = '../logo.png'
117 path = '../logo.png'
118 request_path= '/repo/files/path/file.md'
118 request_path= '/repo/files/path/file.md'
119 produces: '/repo/files/logo.png'
119 produces: '/repo/files/logo.png'
120 """
120 """
121 # TODO(marcink): unicode/str support ?
121 # TODO(marcink): unicode/str support ?
122 # maybe=> safe_unicode(urllib.quote(safe_str(final_path), '/:'))
122 # maybe=> safe_unicode(urllib.quote(safe_str(final_path), '/:'))
123
123
124 def dummy_check(p):
124 def dummy_check(p):
125 return True # assume default is a valid file path
125 return True # assume default is a valid file path
126
126
127 is_repo_file = is_repo_file or dummy_check
127 is_repo_file = is_repo_file or dummy_check
128 if not path:
128 if not path:
129 return request_path
129 return request_path
130
130
131 path = safe_unicode(path)
131 path = safe_unicode(path)
132 request_path = safe_unicode(request_path)
132 request_path = safe_unicode(request_path)
133
133
134 if path.startswith((u'data:', u'javascript:', u'#', u':')):
134 if path.startswith((u'data:', u'javascript:', u'#', u':')):
135 # skip data, anchor, invalid links
135 # skip data, anchor, invalid links
136 return path
136 return path
137
137
138 is_absolute = bool(urlparse.urlparse(path).netloc)
138 is_absolute = bool(urlparse.urlparse(path).netloc)
139 if is_absolute:
139 if is_absolute:
140 return path
140 return path
141
141
142 if not request_path:
142 if not request_path:
143 return path
143 return path
144
144
145 if path.startswith(u'/'):
145 if path.startswith(u'/'):
146 path = path[1:]
146 path = path[1:]
147
147
148 if path.startswith(u'./'):
148 if path.startswith(u'./'):
149 path = path[2:]
149 path = path[2:]
150
150
151 parts = request_path.split('/')
151 parts = request_path.split('/')
152 # compute how deep we need to traverse the request_path
152 # compute how deep we need to traverse the request_path
153 depth = 0
153 depth = 0
154
154
155 if is_repo_file(request_path):
155 if is_repo_file(request_path):
156 # if request path is a VALID file, we use a relative path with
156 # if request path is a VALID file, we use a relative path with
157 # one level up
157 # one level up
158 depth += 1
158 depth += 1
159
159
160 while path.startswith(u'../'):
160 while path.startswith(u'../'):
161 depth += 1
161 depth += 1
162 path = path[3:]
162 path = path[3:]
163
163
164 if depth > 0:
164 if depth > 0:
165 parts = parts[:-depth]
165 parts = parts[:-depth]
166
166
167 parts.append(path)
167 parts.append(path)
168 final_path = u'/'.join(parts).lstrip(u'/')
168 final_path = u'/'.join(parts).lstrip(u'/')
169
169
170 return u'/' + final_path
170 return u'/' + final_path
171
171
172
172
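# Usage sketch (editor's illustration, not part of this changeset): replaying
# the docstring example plus one more case, assuming the surrounding module is
# importable:
#
#   relative_path(u'../logo.png', u'/repo/files/path/file.md')
#   # -> u'/repo/files/logo.png'   (one level for '../' plus one for the file itself)
#
#   relative_path(u'images/shot.png', u'/repo/files/docs/readme.md')
#   # -> u'/repo/files/docs/images/shot.png'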
173 class MarkupRenderer(object):
173 class MarkupRenderer(object):
174 RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES = ['include', 'meta', 'raw']
174 RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES = ['include', 'meta', 'raw']
175
175
176 MARKDOWN_PAT = re.compile(r'\.(md|mkdn?|mdown|markdown)$', re.IGNORECASE)
176 MARKDOWN_PAT = re.compile(r'\.(md|mkdn?|mdown|markdown)$', re.IGNORECASE)
177 RST_PAT = re.compile(r'\.re?st$', re.IGNORECASE)
177 RST_PAT = re.compile(r'\.re?st$', re.IGNORECASE)
178 JUPYTER_PAT = re.compile(r'\.(ipynb)$', re.IGNORECASE)
178 JUPYTER_PAT = re.compile(r'\.(ipynb)$', re.IGNORECASE)
179 PLAIN_PAT = re.compile(r'^readme$', re.IGNORECASE)
179 PLAIN_PAT = re.compile(r'^readme$', re.IGNORECASE)
180
180
181 URL_PAT = re.compile(r'(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]'
181 URL_PAT = re.compile(r'(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]'
182 r'|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)')
182 r'|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)')
183
183
184 extensions = ['codehilite', 'extra', 'def_list', 'sane_lists']
184 extensions = ['codehilite', 'extra', 'def_list', 'sane_lists']
185 output_format = 'html4'
185 output_format = 'html4'
186 markdown_renderer = markdown.Markdown(
186 markdown_renderer = markdown.Markdown(
187 extensions, enable_attributes=False, output_format=output_format)
187 extensions, enable_attributes=False, output_format=output_format)
188
188
189 markdown_renderer_flavored = markdown.Markdown(
189 markdown_renderer_flavored = markdown.Markdown(
190 extensions + [GithubFlavoredMarkdownExtension()],
190 extensions + [GithubFlavoredMarkdownExtension()],
191 enable_attributes=False, output_format=output_format)
191 enable_attributes=False, output_format=output_format)
192
192
193 # extensions together with weights; lower weights are tried first, so these
193 # extensions together with weights; lower weights are tried first, so these
194 # control how extensions are matched against readme names.
194 # control how extensions are matched against readme names.
195 PLAIN_EXTS = [
195 PLAIN_EXTS = [
196 # prefer no extension
196 # prefer no extension
197 ('', 0), # special case that renders READMES names without extension
197 ('', 0), # special case that renders READMES names without extension
198 ('.text', 2), ('.TEXT', 2),
198 ('.text', 2), ('.TEXT', 2),
199 ('.txt', 3), ('.TXT', 3)
199 ('.txt', 3), ('.TXT', 3)
200 ]
200 ]
201
201
202 RST_EXTS = [
202 RST_EXTS = [
203 ('.rst', 1), ('.rest', 1),
203 ('.rst', 1), ('.rest', 1),
204 ('.RST', 2), ('.REST', 2)
204 ('.RST', 2), ('.REST', 2)
205 ]
205 ]
206
206
207 MARKDOWN_EXTS = [
207 MARKDOWN_EXTS = [
208 ('.md', 1), ('.MD', 1),
208 ('.md', 1), ('.MD', 1),
209 ('.mkdn', 2), ('.MKDN', 2),
209 ('.mkdn', 2), ('.MKDN', 2),
210 ('.mdown', 3), ('.MDOWN', 3),
210 ('.mdown', 3), ('.MDOWN', 3),
211 ('.markdown', 4), ('.MARKDOWN', 4)
211 ('.markdown', 4), ('.MARKDOWN', 4)
212 ]
212 ]
213
213
214 def _detect_renderer(self, source, filename=None):
214 def _detect_renderer(self, source, filename=None):
215 """
215 """
216 runs detection of what renderer should be used for generating html
216 runs detection of what renderer should be used for generating html
217 from a markup language
217 from a markup language
218
218
219 filename can also explicitly be a renderer name
219 filename can also explicitly be a renderer name
220
220
221 :param source:
221 :param source:
222 :param filename:
222 :param filename:
223 """
223 """
224
224
225 if MarkupRenderer.MARKDOWN_PAT.findall(filename):
225 if MarkupRenderer.MARKDOWN_PAT.findall(filename):
226 detected_renderer = 'markdown'
226 detected_renderer = 'markdown'
227 elif MarkupRenderer.RST_PAT.findall(filename):
227 elif MarkupRenderer.RST_PAT.findall(filename):
228 detected_renderer = 'rst'
228 detected_renderer = 'rst'
229 elif MarkupRenderer.JUPYTER_PAT.findall(filename):
229 elif MarkupRenderer.JUPYTER_PAT.findall(filename):
230 detected_renderer = 'jupyter'
230 detected_renderer = 'jupyter'
231 elif MarkupRenderer.PLAIN_PAT.findall(filename):
231 elif MarkupRenderer.PLAIN_PAT.findall(filename):
232 detected_renderer = 'plain'
232 detected_renderer = 'plain'
233 else:
233 else:
234 detected_renderer = 'plain'
234 detected_renderer = 'plain'
235
235
236 return getattr(MarkupRenderer, detected_renderer)
236 return getattr(MarkupRenderer, detected_renderer)
237
237
238 @classmethod
238 @classmethod
239 def bleach_clean(cls, text):
239 def bleach_clean(cls, text):
240 from .bleach_whitelist import markdown_attrs, markdown_tags
240 from .bleach_whitelist import markdown_attrs, markdown_tags
241 allowed_tags = markdown_tags
241 allowed_tags = markdown_tags
242 allowed_attrs = markdown_attrs
242 allowed_attrs = markdown_attrs
243 return bleach.clean(text, tags=allowed_tags, attributes=allowed_attrs)
243 return bleach.clean(text, tags=allowed_tags, attributes=allowed_attrs)
244
244
245 @classmethod
245 @classmethod
246 def renderer_from_filename(cls, filename, exclude):
246 def renderer_from_filename(cls, filename, exclude):
247 """
247 """
248 Detect renderer markdown/rst from filename and optionally use exclude
248 Detect renderer markdown/rst from filename and optionally use exclude
249 list to remove some options. This is mostly used in helpers.
249 list to remove some options. This is mostly used in helpers.
250 Returns None when no renderer can be detected.
250 Returns None when no renderer can be detected.
251 """
251 """
252 def _filter(elements):
252 def _filter(elements):
253 if isinstance(exclude, (list, tuple)):
253 if isinstance(exclude, (list, tuple)):
254 return [x for x in elements if x not in exclude]
254 return [x for x in elements if x not in exclude]
255 return elements
255 return elements
256
256
257 if filename.endswith(
257 if filename.endswith(
258 tuple(_filter([x[0] for x in cls.MARKDOWN_EXTS if x[0]]))):
258 tuple(_filter([x[0] for x in cls.MARKDOWN_EXTS if x[0]]))):
259 return 'markdown'
259 return 'markdown'
260 if filename.endswith(tuple(_filter([x[0] for x in cls.RST_EXTS if x[0]]))):
260 if filename.endswith(tuple(_filter([x[0] for x in cls.RST_EXTS if x[0]]))):
261 return 'rst'
261 return 'rst'
262
262
263 return None
263 return None
264
264
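A minimal standalone sketch of the suffix-plus-exclude detection that renderer_from_filename performs; the names and the lower-casing shortcut are illustrative, not RhodeCode's API:

# Illustrative re-implementation of extension-based renderer detection.
MARKDOWN_SUFFIXES = ('.md', '.mkdn', '.mdown', '.markdown')
RST_SUFFIXES = ('.rst', '.rest')

def detect_renderer(filename, exclude=None):
    exclude = set(exclude or [])
    def allowed(suffixes):
        # drop any extensions the caller asked to exclude
        return tuple(s for s in suffixes if s not in exclude)
    name = filename.lower()
    if name.endswith(allowed(MARKDOWN_SUFFIXES)):
        return 'markdown'
    if name.endswith(allowed(RST_SUFFIXES)):
        return 'rst'
    return None

print(detect_renderer('docs/index.rst'))               # 'rst'
print(detect_renderer('README.md', exclude=['.md']))   # None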
265 def render(self, source, filename=None):
265 def render(self, source, filename=None):
266 """
266 """
267 Renders the given source using a detected renderer;
267 Renders the given source using a detected renderer;
268 renderers are detected based on file extension or mimetype.
268 renderers are detected based on file extension or mimetype.
269 As a last resort it falls back to simple HTML, replacing newlines with <br/>.
269 As a last resort it falls back to simple HTML, replacing newlines with <br/>.
270
270
271 :param source:
271 :param source:
272 :param filename:
272 :param filename:
273 """
273 """
274
274
275 renderer = self._detect_renderer(source, filename)
275 renderer = self._detect_renderer(source, filename)
276 readme_data = renderer(source)
276 readme_data = renderer(source)
277 return readme_data
277 return readme_data
278
278
279 @classmethod
279 @classmethod
280 def _flavored_markdown(cls, text):
280 def _flavored_markdown(cls, text):
281 """
281 """
282 Github style flavored markdown
282 Github style flavored markdown
283
283
284 :param text:
284 :param text:
285 """
285 """
286
286
287 # Extract pre blocks.
287 # Extract pre blocks.
288 extractions = {}
288 extractions = {}
289
289
290 def pre_extraction_callback(matchobj):
290 def pre_extraction_callback(matchobj):
291 digest = md5_safe(matchobj.group(0))
291 digest = md5_safe(matchobj.group(0))
292 extractions[digest] = matchobj.group(0)
292 extractions[digest] = matchobj.group(0)
293 return "{gfm-extraction-%s}" % digest
293 return "{gfm-extraction-%s}" % digest
294 pattern = re.compile(r'<pre>.*?</pre>', re.MULTILINE | re.DOTALL)
294 pattern = re.compile(r'<pre>.*?</pre>', re.MULTILINE | re.DOTALL)
295 text = re.sub(pattern, pre_extraction_callback, text)
295 text = re.sub(pattern, pre_extraction_callback, text)
296
296
297 # Prevent foo_bar_baz from ending up with an italic word in the middle.
297 # Prevent foo_bar_baz from ending up with an italic word in the middle.
298 def italic_callback(matchobj):
298 def italic_callback(matchobj):
299 s = matchobj.group(0)
299 s = matchobj.group(0)
300 if list(s).count('_') >= 2:
300 if list(s).count('_') >= 2:
301 return s.replace('_', r'\_')
301 return s.replace('_', r'\_')
302 return s
302 return s
303 text = re.sub(r'^(?! {4}|\t)\w+_\w+_\w[\w_]*', italic_callback, text)
303 text = re.sub(r'^(?! {4}|\t)\w+_\w+_\w[\w_]*', italic_callback, text)
304
304
305 # Insert pre block extractions.
305 # Insert pre block extractions.
306 def pre_insert_callback(matchobj):
306 def pre_insert_callback(matchobj):
307 return '\n\n' + extractions[matchobj.group(1)]
307 return '\n\n' + extractions[matchobj.group(1)]
308 text = re.sub(r'\{gfm-extraction-([0-9a-f]{32})\}',
308 text = re.sub(r'\{gfm-extraction-([0-9a-f]{32})\}',
309 pre_insert_callback, text)
309 pre_insert_callback, text)
310
310
311 return text
311 return text
312
312
313 @classmethod
313 @classmethod
314 def urlify_text(cls, text):
314 def urlify_text(cls, text):
315 def url_func(match_obj):
315 def url_func(match_obj):
316 url_full = match_obj.groups()[0]
316 url_full = match_obj.groups()[0]
317 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
317 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
318
318
319 return cls.URL_PAT.sub(url_func, text)
319 return cls.URL_PAT.sub(url_func, text)
320
320
321 @classmethod
321 @classmethod
322 def plain(cls, source, universal_newline=True):
322 def plain(cls, source, universal_newline=True, leading_newline=True):
323 source = safe_unicode(source)
323 source = safe_unicode(source)
324 if universal_newline:
324 if universal_newline:
325 newline = '\n'
325 newline = '\n'
326 source = newline.join(source.splitlines())
326 source = newline.join(source.splitlines())
327
327
328 source = cls.urlify_text(source)
328 rendered_source = cls.urlify_text(source)
329 return '<br />' + source.replace("\n", '<br />')
329 source = ''
330 if leading_newline:
331 source += '<br />'
332 source += rendered_source.replace("\n", '<br />')
333 return source
330
334
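This changeset adds the leading_newline switch to plain(); a small standalone sketch (mirroring, not importing, the method, and omitting the urlify step) shows what the flag toggles:

# Sketch of the plain() conversion before and after this change.
def plain_to_html(source, leading_newline=True):
    source = '\n'.join(source.splitlines())   # normalise newlines
    html = source.replace('\n', '<br />')
    return ('<br />' + html) if leading_newline else html

print(plain_to_html('line one\r\nline two'))
# '<br />line one<br />line two'   (old, unconditional behaviour)
print(plain_to_html('line one\r\nline two', leading_newline=False))
# 'line one<br />line two'         (new opt-out)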
331 @classmethod
335 @classmethod
332 def markdown(cls, source, safe=True, flavored=True, mentions=False,
336 def markdown(cls, source, safe=True, flavored=True, mentions=False,
333 clean_html=True):
337 clean_html=True):
334 """
338 """
335 returns markdown rendered code cleaned by the bleach library
339 returns markdown rendered code cleaned by the bleach library
336 """
340 """
337
341
338 if flavored:
342 if flavored:
339 markdown_renderer = cls.markdown_renderer_flavored
343 markdown_renderer = cls.markdown_renderer_flavored
340 else:
344 else:
341 markdown_renderer = cls.markdown_renderer
345 markdown_renderer = cls.markdown_renderer
342
346
343 if mentions:
347 if mentions:
344 mention_pat = re.compile(MENTIONS_REGEX)
348 mention_pat = re.compile(MENTIONS_REGEX)
345
349
346 def wrapp(match_obj):
350 def wrapp(match_obj):
347 uname = match_obj.groups()[0]
351 uname = match_obj.groups()[0]
348 return ' **@%(uname)s** ' % {'uname': uname}
352 return ' **@%(uname)s** ' % {'uname': uname}
349 mention_hl = mention_pat.sub(wrapp, source).strip()
353 mention_hl = mention_pat.sub(wrapp, source).strip()
350 # mentions were extracted above; re-render with mentions disabled
354 # mentions were extracted above; re-render with mentions disabled
351 return cls.markdown(mention_hl, safe=safe, flavored=flavored,
355 return cls.markdown(mention_hl, safe=safe, flavored=flavored,
352 mentions=False)
356 mentions=False)
353
357
354 source = safe_unicode(source)
358 source = safe_unicode(source)
355
359
356 try:
360 try:
357 if flavored:
361 if flavored:
358 source = cls._flavored_markdown(source)
362 source = cls._flavored_markdown(source)
359 rendered = markdown_renderer.convert(source)
363 rendered = markdown_renderer.convert(source)
360 if clean_html:
364 if clean_html:
361 rendered = cls.bleach_clean(rendered)
365 rendered = cls.bleach_clean(rendered)
362 return rendered
366 return rendered
363 except Exception:
367 except Exception:
364 log.exception('Error when rendering Markdown')
368 log.exception('Error when rendering Markdown')
365 if safe:
369 if safe:
366 log.debug('Fallback to render in plain mode')
370 log.debug('Fallback to render in plain mode')
367 return cls.plain(source)
371 return cls.plain(source)
368 else:
372 else:
369 raise
373 raise
370
374
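The mentions branch above is a pre-pass: @logins are wrapped in bold markdown and the text is then re-rendered with mentions disabled. A standalone sketch with an illustrative pattern (the real MENTIONS_REGEX lives elsewhere in RhodeCode and is more involved):

import re

# Illustrative mention pattern; not RhodeCode's MENTIONS_REGEX.
MENTION_PAT = re.compile(r'(?:^|(?<=\s))@([a-zA-Z0-9_-]+)')

def highlight_mentions(text):
    # wrap each @login in bold markdown, as the pre-pass above does
    return MENTION_PAT.sub(lambda m: ' **@%s** ' % m.group(1), text).strip()

print(highlight_mentions('ping @marcink about the renderer'))
# 'ping  **@marcink**  about the renderer'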
371 @classmethod
375 @classmethod
372 def rst(cls, source, safe=True, mentions=False, clean_html=False):
376 def rst(cls, source, safe=True, mentions=False, clean_html=False):
373 if mentions:
377 if mentions:
374 mention_pat = re.compile(MENTIONS_REGEX)
378 mention_pat = re.compile(MENTIONS_REGEX)
375
379
376 def wrapp(match_obj):
380 def wrapp(match_obj):
377 uname = match_obj.groups()[0]
381 uname = match_obj.groups()[0]
378 return ' **@%(uname)s** ' % {'uname': uname}
382 return ' **@%(uname)s** ' % {'uname': uname}
379 mention_hl = mention_pat.sub(wrapp, source).strip()
383 mention_hl = mention_pat.sub(wrapp, source).strip()
380 # mentions were extracted above; re-render with mentions disabled
384 # mentions were extracted above; re-render with mentions disabled
381 return cls.rst(mention_hl, safe=safe, mentions=False)
385 return cls.rst(mention_hl, safe=safe, mentions=False)
382
386
383 source = safe_unicode(source)
387 source = safe_unicode(source)
384 try:
388 try:
385 docutils_settings = dict(
389 docutils_settings = dict(
386 [(alias, None) for alias in
390 [(alias, None) for alias in
387 cls.RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES])
391 cls.RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES])
388
392
389 docutils_settings.update({
393 docutils_settings.update({
390 'input_encoding': 'unicode', 'report_level': 4})
394 'input_encoding': 'unicode', 'report_level': 4})
391
395
392 for k, v in docutils_settings.iteritems():
396 for k, v in docutils_settings.iteritems():
393 directives.register_directive(k, v)
397 directives.register_directive(k, v)
394
398
395 parts = publish_parts(source=source,
399 parts = publish_parts(source=source,
396 writer=RhodeCodeWriter(),
400 writer=RhodeCodeWriter(),
397 settings_overrides=docutils_settings)
401 settings_overrides=docutils_settings)
398 rendered = parts["fragment"]
402 rendered = parts["fragment"]
399 if clean_html:
403 if clean_html:
400 rendered = cls.bleach_clean(rendered)
404 rendered = cls.bleach_clean(rendered)
401 return parts['html_title'] + rendered
405 return parts['html_title'] + rendered
402 except Exception:
406 except Exception:
403 log.exception('Error when rendering RST')
407 log.exception('Error when rendering RST')
404 if safe:
408 if safe:
405 log.debug('Falling back to render in plain mode')
409 log.debug('Falling back to render in plain mode')
406 return cls.plain(source)
410 return cls.plain(source)
407 else:
411 else:
408 raise
412 raise
409
413
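The RST path ultimately goes through docutils' publish_parts; a hedged sketch of the same call with the stock HTML writer (RhodeCodeWriter and the directive blacklist are project-specific and omitted here):

from docutils.core import publish_parts

def rst_fragment(source):
    # report_level=4 silences most docutils warnings, as in the settings above
    parts = publish_parts(
        source=source,
        writer_name='html',
        settings_overrides={'input_encoding': 'unicode', 'report_level': 4})
    # title plus body fragment, concatenated like the method above
    return parts['html_title'] + parts['fragment']

print(rst_fragment('Title\n=====\n\nSome *emphasised* text.'))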
410 @classmethod
414 @classmethod
411 def jupyter(cls, source, safe=True):
415 def jupyter(cls, source, safe=True):
412 from rhodecode.lib import helpers
416 from rhodecode.lib import helpers
413
417
414 from traitlets.config import Config
418 from traitlets.config import Config
415 import nbformat
419 import nbformat
416 from nbconvert import HTMLExporter
420 from nbconvert import HTMLExporter
417 from nbconvert.preprocessors import Preprocessor
421 from nbconvert.preprocessors import Preprocessor
418
422
419 class CustomHTMLExporter(HTMLExporter):
423 class CustomHTMLExporter(HTMLExporter):
420 def _template_file_default(self):
424 def _template_file_default(self):
421 return 'basic'
425 return 'basic'
422
426
423 class Sandbox(Preprocessor):
427 class Sandbox(Preprocessor):
424
428
425 def preprocess(self, nb, resources):
429 def preprocess(self, nb, resources):
426 sandbox_text = 'SandBoxed(IPython.core.display.Javascript object)'
430 sandbox_text = 'SandBoxed(IPython.core.display.Javascript object)'
427 for cell in nb['cells']:
431 for cell in nb['cells']:
428 if safe and 'outputs' in cell:
432 if safe and 'outputs' in cell:
429 for cell_output in cell['outputs']:
433 for cell_output in cell['outputs']:
430 if 'data' in cell_output:
434 if 'data' in cell_output:
431 if 'application/javascript' in cell_output['data']:
435 if 'application/javascript' in cell_output['data']:
432 cell_output['data']['text/plain'] = sandbox_text
436 cell_output['data']['text/plain'] = sandbox_text
433 cell_output['data'].pop('application/javascript', None)
437 cell_output['data'].pop('application/javascript', None)
434 return nb, resources
438 return nb, resources
435
439
436 def _sanitize_resources(resources):
440 def _sanitize_resources(resources):
437 """
441 """
438 Skip/sanitize some of the CSS generated and included in jupyter
442 Skip/sanitize some of the CSS generated and included in jupyter
439 so that it doesn't mess up the UI as much
443 so that it doesn't mess up the UI as much
440 """
444 """
441
445
442 # TODO(marcink): we should probably replace this with a whole custom
446 # TODO(marcink): we should probably replace this with a whole custom
443 # CSS set that doesn't interfere, but jupyter-generated html has some
447 # CSS set that doesn't interfere, but jupyter-generated html has some
444 # special markers, so it requires a custom HTML exporter template with
448 # special markers, so it requires a custom HTML exporter template with
445 # _default_template_path_default to achieve that
449 # _default_template_path_default to achieve that
446
450
447 # strip the reset CSS
451 # strip the reset CSS
448 resources[0] = resources[0][resources[0].find('/*! Source'):]
452 resources[0] = resources[0][resources[0].find('/*! Source'):]
449 return resources
453 return resources
450
454
451 def as_html(notebook):
455 def as_html(notebook):
452 conf = Config()
456 conf = Config()
453 conf.CustomHTMLExporter.preprocessors = [Sandbox]
457 conf.CustomHTMLExporter.preprocessors = [Sandbox]
454 html_exporter = CustomHTMLExporter(config=conf)
458 html_exporter = CustomHTMLExporter(config=conf)
455
459
456 (body, resources) = html_exporter.from_notebook_node(notebook)
460 (body, resources) = html_exporter.from_notebook_node(notebook)
457 header = '<!-- ## IPYTHON NOTEBOOK RENDERING ## -->'
461 header = '<!-- ## IPYTHON NOTEBOOK RENDERING ## -->'
458 js = MakoTemplate(r'''
462 js = MakoTemplate(r'''
459 <!-- Load mathjax -->
463 <!-- Load mathjax -->
460 <!-- MathJax configuration -->
464 <!-- MathJax configuration -->
461 <script type="text/x-mathjax-config">
465 <script type="text/x-mathjax-config">
462 MathJax.Hub.Config({
466 MathJax.Hub.Config({
463 jax: ["input/TeX","output/HTML-CSS", "output/PreviewHTML"],
467 jax: ["input/TeX","output/HTML-CSS", "output/PreviewHTML"],
464 extensions: ["tex2jax.js","MathMenu.js","MathZoom.js", "fast-preview.js", "AssistiveMML.js", "[Contrib]/a11y/accessibility-menu.js"],
468 extensions: ["tex2jax.js","MathMenu.js","MathZoom.js", "fast-preview.js", "AssistiveMML.js", "[Contrib]/a11y/accessibility-menu.js"],
465 TeX: {
469 TeX: {
466 extensions: ["AMSmath.js","AMSsymbols.js","noErrors.js","noUndefined.js"]
470 extensions: ["AMSmath.js","AMSsymbols.js","noErrors.js","noUndefined.js"]
467 },
471 },
468 tex2jax: {
472 tex2jax: {
469 inlineMath: [ ['$','$'], ["\\(","\\)"] ],
473 inlineMath: [ ['$','$'], ["\\(","\\)"] ],
470 displayMath: [ ['$$','$$'], ["\\[","\\]"] ],
474 displayMath: [ ['$$','$$'], ["\\[","\\]"] ],
471 processEscapes: true,
475 processEscapes: true,
472 processEnvironments: true
476 processEnvironments: true
473 },
477 },
474 // Center justify equations in code and markdown cells. Elsewhere
478 // Center justify equations in code and markdown cells. Elsewhere
475 // we use CSS to left justify single line equations in code cells.
479 // we use CSS to left justify single line equations in code cells.
476 displayAlign: 'center',
480 displayAlign: 'center',
477 "HTML-CSS": {
481 "HTML-CSS": {
478 styles: {'.MathJax_Display': {"margin": 0}},
482 styles: {'.MathJax_Display': {"margin": 0}},
479 linebreaks: { automatic: true },
483 linebreaks: { automatic: true },
480 availableFonts: ["STIX", "TeX"]
484 availableFonts: ["STIX", "TeX"]
481 },
485 },
482 showMathMenu: false
486 showMathMenu: false
483 });
487 });
484 </script>
488 </script>
485 <!-- End of mathjax configuration -->
489 <!-- End of mathjax configuration -->
486 <script src="${h.asset('js/src/math_jax/MathJax.js')}"></script>
490 <script src="${h.asset('js/src/math_jax/MathJax.js')}"></script>
487 ''').render(h=helpers)
491 ''').render(h=helpers)
488
492
489 css = '<style>{}</style>'.format(
493 css = '<style>{}</style>'.format(
490 ''.join(_sanitize_resources(resources['inlining']['css'])))
494 ''.join(_sanitize_resources(resources['inlining']['css'])))
491
495
492 body = '\n'.join([header, css, js, body])
496 body = '\n'.join([header, css, js, body])
493 return body, resources
497 return body, resources
494
498
495 notebook = nbformat.reads(source, as_version=4)
499 notebook = nbformat.reads(source, as_version=4)
496 (body, resources) = as_html(notebook)
500 (body, resources) = as_html(notebook)
497 return body
501 return body
498
502
499
503
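The Sandbox preprocessor above only rewrites cell outputs before export; the same idea applied to a plain notebook dict, standalone so it runs without nbconvert:

# Illustrative sandboxing of JavaScript outputs in a v4-style notebook dict.
SANDBOX_TEXT = 'SandBoxed(IPython.core.display.Javascript object)'

def sandbox_outputs(notebook):
    for cell in notebook.get('cells', []):
        for output in cell.get('outputs', []):
            data = output.get('data', {})
            if 'application/javascript' in data:
                # replace the script with a plain-text placeholder
                data['text/plain'] = SANDBOX_TEXT
                data.pop('application/javascript', None)
    return notebook

nb = {'cells': [{'outputs': [{'data': {'application/javascript': 'alert(1)'}}]}]}
print(sandbox_outputs(nb)['cells'][0]['outputs'][0]['data'])
# {'text/plain': 'SandBoxed(IPython.core.display.Javascript object)'}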
500 class RstTemplateRenderer(object):
504 class RstTemplateRenderer(object):
501
505
502 def __init__(self):
506 def __init__(self):
503 base = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
507 base = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
504 rst_template_dirs = [os.path.join(base, 'templates', 'rst_templates')]
508 rst_template_dirs = [os.path.join(base, 'templates', 'rst_templates')]
505 self.template_store = TemplateLookup(
509 self.template_store = TemplateLookup(
506 directories=rst_template_dirs,
510 directories=rst_template_dirs,
507 input_encoding='utf-8',
511 input_encoding='utf-8',
508 imports=['from rhodecode.lib import helpers as h'])
512 imports=['from rhodecode.lib import helpers as h'])
509
513
510 def _get_template(self, templatename):
514 def _get_template(self, templatename):
511 return self.template_store.get_template(templatename)
515 return self.template_store.get_template(templatename)
512
516
513 def render(self, template_name, **kwargs):
517 def render(self, template_name, **kwargs):
514 template = self._get_template(template_name)
518 template = self._get_template(template_name)
515 return template.render(**kwargs)
519 return template.render(**kwargs)
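RstTemplateRenderer is a thin wrapper around Mako's TemplateLookup; a hedged usage sketch, where the directory and template name are illustrative rather than RhodeCode's actual rst_templates:

from mako.lookup import TemplateLookup

# Directory and template name are made up for the example; RhodeCode ships
# its own rst_templates directory for auto-generated descriptions.
lookup = TemplateLookup(
    directories=['./rst_templates'],
    input_encoding='utf-8')

template = lookup.get_template('pull_request.mako')
print(template.render(title='Example', body='...'))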
NO CONTENT: modified file
The requested commit or file is too big and content was truncated.
@@ -1,620 +1,621 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 these are the form validation classes; see
22 these are the form validation classes; see
23 http://formencode.org/module-formencode.validators.html
23 http://formencode.org/module-formencode.validators.html
24 for a list of all available validators
24 for a list of all available validators
25
25
26 we can create our own validators
26 we can create our own validators
27
27
28 The table below outlines the options which can be used in a schema in addition to the validators themselves
28 The table below outlines the options which can be used in a schema in addition to the validators themselves
29 pre_validators [] These validators will be applied before the schema
29 pre_validators [] These validators will be applied before the schema
30 chained_validators [] These validators will be applied after the schema
30 chained_validators [] These validators will be applied after the schema
31 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
31 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
32 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
32 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
33 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
33 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
34 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
34 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
35
35
36
36
37 <name> = formencode.validators.<name of validator>
37 <name> = formencode.validators.<name of validator>
38 <name> must equal form name
38 <name> must equal form name
39 list=[1,2,3,4,5]
39 list=[1,2,3,4,5]
40 for SELECT use formencode.All(OneOf(list), Int())
40 for SELECT use formencode.All(OneOf(list), Int())
41
41
42 """
42 """
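A standalone, hedged example of the schema options described in the table above (allow_extra_fields together with filter_extra_fields), using plain formencode validators; the field names are illustrative:

import formencode
from formencode import validators

class CommentSchema(formencode.Schema):
    # options documented in the table above
    allow_extra_fields = True      # unknown keys are not an error...
    filter_extra_fields = True     # ...and are dropped from the result
    author = validators.UnicodeString(strip=True, not_empty=True)
    body = validators.UnicodeString(not_empty=True)

try:
    clean = CommentSchema().to_python(
        {'author': ' marcink ', 'body': 'LGTM', 'csrf_token': 'x'})
    print(clean)   # author stripped, csrf_token filtered out
except formencode.Invalid as e:
    print(e.unpack_errors())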
43
43
44 import deform
44 import deform
45 import logging
45 import logging
46 import formencode
46 import formencode
47
47
48 from pkg_resources import resource_filename
48 from pkg_resources import resource_filename
49 from formencode import All, Pipe
49 from formencode import All, Pipe
50
50
51 from pyramid.threadlocal import get_current_request
51 from pyramid.threadlocal import get_current_request
52
52
53 from rhodecode import BACKENDS
53 from rhodecode import BACKENDS
54 from rhodecode.lib import helpers
54 from rhodecode.lib import helpers
55 from rhodecode.model import validators as v
55 from rhodecode.model import validators as v
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 deform_templates = resource_filename('deform', 'templates')
60 deform_templates = resource_filename('deform', 'templates')
61 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
61 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
62 search_path = (rhodecode_templates, deform_templates)
62 search_path = (rhodecode_templates, deform_templates)
63
63
64
64
65 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
65 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
66 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
66 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
67 def __call__(self, template_name, **kw):
67 def __call__(self, template_name, **kw):
68 kw['h'] = helpers
68 kw['h'] = helpers
69 kw['request'] = get_current_request()
69 kw['request'] = get_current_request()
70 return self.load(template_name)(**kw)
70 return self.load(template_name)(**kw)
71
71
72
72
73 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
73 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
74 deform.Form.set_default_renderer(form_renderer)
74 deform.Form.set_default_renderer(form_renderer)
75
75
76
76
77 def LoginForm(localizer):
77 def LoginForm(localizer):
78 _ = localizer
78 _ = localizer
79
79
80 class _LoginForm(formencode.Schema):
80 class _LoginForm(formencode.Schema):
81 allow_extra_fields = True
81 allow_extra_fields = True
82 filter_extra_fields = True
82 filter_extra_fields = True
83 username = v.UnicodeString(
83 username = v.UnicodeString(
84 strip=True,
84 strip=True,
85 min=1,
85 min=1,
86 not_empty=True,
86 not_empty=True,
87 messages={
87 messages={
88 'empty': _(u'Please enter a login'),
88 'empty': _(u'Please enter a login'),
89 'tooShort': _(u'Enter a value %(min)i characters long or more')
89 'tooShort': _(u'Enter a value %(min)i characters long or more')
90 }
90 }
91 )
91 )
92
92
93 password = v.UnicodeString(
93 password = v.UnicodeString(
94 strip=False,
94 strip=False,
95 min=3,
95 min=3,
96 max=72,
96 max=72,
97 not_empty=True,
97 not_empty=True,
98 messages={
98 messages={
99 'empty': _(u'Please enter a password'),
99 'empty': _(u'Please enter a password'),
100 'tooShort': _(u'Enter %(min)i characters or more')}
100 'tooShort': _(u'Enter %(min)i characters or more')}
101 )
101 )
102
102
103 remember = v.StringBoolean(if_missing=False)
103 remember = v.StringBoolean(if_missing=False)
104
104
105 chained_validators = [v.ValidAuth(localizer)]
105 chained_validators = [v.ValidAuth(localizer)]
106 return _LoginForm
106 return _LoginForm
107
107
108
108
109 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
109 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
110 old_data = old_data or {}
110 old_data = old_data or {}
111 available_languages = available_languages or []
111 available_languages = available_languages or []
112 _ = localizer
112 _ = localizer
113
113
114 class _UserForm(formencode.Schema):
114 class _UserForm(formencode.Schema):
115 allow_extra_fields = True
115 allow_extra_fields = True
116 filter_extra_fields = True
116 filter_extra_fields = True
117 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
117 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
118 v.ValidUsername(localizer, edit, old_data))
118 v.ValidUsername(localizer, edit, old_data))
119 if edit:
119 if edit:
120 new_password = All(
120 new_password = All(
121 v.ValidPassword(localizer),
121 v.ValidPassword(localizer),
122 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
122 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
123 )
123 )
124 password_confirmation = All(
124 password_confirmation = All(
125 v.ValidPassword(localizer),
125 v.ValidPassword(localizer),
126 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
126 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
127 )
127 )
128 admin = v.StringBoolean(if_missing=False)
128 admin = v.StringBoolean(if_missing=False)
129 else:
129 else:
130 password = All(
130 password = All(
131 v.ValidPassword(localizer),
131 v.ValidPassword(localizer),
132 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
132 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
133 )
133 )
134 password_confirmation = All(
134 password_confirmation = All(
135 v.ValidPassword(localizer),
135 v.ValidPassword(localizer),
136 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
136 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
137 )
137 )
138
138
139 password_change = v.StringBoolean(if_missing=False)
139 password_change = v.StringBoolean(if_missing=False)
140 create_repo_group = v.StringBoolean(if_missing=False)
140 create_repo_group = v.StringBoolean(if_missing=False)
141
141
142 active = v.StringBoolean(if_missing=False)
142 active = v.StringBoolean(if_missing=False)
143 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
143 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
144 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
144 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
145 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
145 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
146 extern_name = v.UnicodeString(strip=True)
146 extern_name = v.UnicodeString(strip=True)
147 extern_type = v.UnicodeString(strip=True)
147 extern_type = v.UnicodeString(strip=True)
148 language = v.OneOf(available_languages, hideList=False,
148 language = v.OneOf(available_languages, hideList=False,
149 testValueList=True, if_missing=None)
149 testValueList=True, if_missing=None)
150 chained_validators = [v.ValidPasswordsMatch(localizer)]
150 chained_validators = [v.ValidPasswordsMatch(localizer)]
151 return _UserForm
151 return _UserForm
152
152
153
153
154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
155 old_data = old_data or {}
155 old_data = old_data or {}
156 _ = localizer
156 _ = localizer
157
157
158 class _UserGroupForm(formencode.Schema):
158 class _UserGroupForm(formencode.Schema):
159 allow_extra_fields = True
159 allow_extra_fields = True
160 filter_extra_fields = True
160 filter_extra_fields = True
161
161
162 users_group_name = All(
162 users_group_name = All(
163 v.UnicodeString(strip=True, min=1, not_empty=True),
163 v.UnicodeString(strip=True, min=1, not_empty=True),
164 v.ValidUserGroup(localizer, edit, old_data)
164 v.ValidUserGroup(localizer, edit, old_data)
165 )
165 )
166 user_group_description = v.UnicodeString(strip=True, min=1,
166 user_group_description = v.UnicodeString(strip=True, min=1,
167 not_empty=False)
167 not_empty=False)
168
168
169 users_group_active = v.StringBoolean(if_missing=False)
169 users_group_active = v.StringBoolean(if_missing=False)
170
170
171 if edit:
171 if edit:
172 # this is user group owner
172 # this is user group owner
173 user = All(
173 user = All(
174 v.UnicodeString(not_empty=True),
174 v.UnicodeString(not_empty=True),
175 v.ValidRepoUser(localizer, allow_disabled))
175 v.ValidRepoUser(localizer, allow_disabled))
176 return _UserGroupForm
176 return _UserGroupForm
177
177
178
178
179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
180 can_create_in_root=False, allow_disabled=False):
180 can_create_in_root=False, allow_disabled=False):
181 _ = localizer
181 _ = localizer
182 old_data = old_data or {}
182 old_data = old_data or {}
183 available_groups = available_groups or []
183 available_groups = available_groups or []
184
184
185 class _RepoGroupForm(formencode.Schema):
185 class _RepoGroupForm(formencode.Schema):
186 allow_extra_fields = True
186 allow_extra_fields = True
187 filter_extra_fields = False
187 filter_extra_fields = False
188
188
189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
190 v.SlugifyName(localizer),)
190 v.SlugifyName(localizer),)
191 group_description = v.UnicodeString(strip=True, min=1,
191 group_description = v.UnicodeString(strip=True, min=1,
192 not_empty=False)
192 not_empty=False)
193 group_copy_permissions = v.StringBoolean(if_missing=False)
193 group_copy_permissions = v.StringBoolean(if_missing=False)
194
194
195 group_parent_id = v.OneOf(available_groups, hideList=False,
195 group_parent_id = v.OneOf(available_groups, hideList=False,
196 testValueList=True, not_empty=True)
196 testValueList=True, not_empty=True)
197 enable_locking = v.StringBoolean(if_missing=False)
197 enable_locking = v.StringBoolean(if_missing=False)
198 chained_validators = [
198 chained_validators = [
199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
200
200
201 if edit:
201 if edit:
202 # this is repo group owner
202 # this is repo group owner
203 user = All(
203 user = All(
204 v.UnicodeString(not_empty=True),
204 v.UnicodeString(not_empty=True),
205 v.ValidRepoUser(localizer, allow_disabled))
205 v.ValidRepoUser(localizer, allow_disabled))
206 return _RepoGroupForm
206 return _RepoGroupForm
207
207
208
208
209 def RegisterForm(localizer, edit=False, old_data=None):
209 def RegisterForm(localizer, edit=False, old_data=None):
210 _ = localizer
210 _ = localizer
211 old_data = old_data or {}
211 old_data = old_data or {}
212
212
213 class _RegisterForm(formencode.Schema):
213 class _RegisterForm(formencode.Schema):
214 allow_extra_fields = True
214 allow_extra_fields = True
215 filter_extra_fields = True
215 filter_extra_fields = True
216 username = All(
216 username = All(
217 v.ValidUsername(localizer, edit, old_data),
217 v.ValidUsername(localizer, edit, old_data),
218 v.UnicodeString(strip=True, min=1, not_empty=True)
218 v.UnicodeString(strip=True, min=1, not_empty=True)
219 )
219 )
220 password = All(
220 password = All(
221 v.ValidPassword(localizer),
221 v.ValidPassword(localizer),
222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
223 )
223 )
224 password_confirmation = All(
224 password_confirmation = All(
225 v.ValidPassword(localizer),
225 v.ValidPassword(localizer),
226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
227 )
227 )
228 active = v.StringBoolean(if_missing=False)
228 active = v.StringBoolean(if_missing=False)
229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
232
232
233 chained_validators = [v.ValidPasswordsMatch(localizer)]
233 chained_validators = [v.ValidPasswordsMatch(localizer)]
234 return _RegisterForm
234 return _RegisterForm
235
235
236
236
237 def PasswordResetForm(localizer):
237 def PasswordResetForm(localizer):
238 _ = localizer
238 _ = localizer
239
239
240 class _PasswordResetForm(formencode.Schema):
240 class _PasswordResetForm(formencode.Schema):
241 allow_extra_fields = True
241 allow_extra_fields = True
242 filter_extra_fields = True
242 filter_extra_fields = True
243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
244 return _PasswordResetForm
244 return _PasswordResetForm
245
245
246
246
247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None,
247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None,
248 landing_revs=None, allow_disabled=False):
248 landing_revs=None, allow_disabled=False):
249 _ = localizer
249 _ = localizer
250 old_data = old_data or {}
250 old_data = old_data or {}
251 repo_groups = repo_groups or []
251 repo_groups = repo_groups or []
252 landing_revs = landing_revs or []
252 landing_revs = landing_revs or []
253 supported_backends = BACKENDS.keys()
253 supported_backends = BACKENDS.keys()
254
254
255 class _RepoForm(formencode.Schema):
255 class _RepoForm(formencode.Schema):
256 allow_extra_fields = True
256 allow_extra_fields = True
257 filter_extra_fields = False
257 filter_extra_fields = False
258 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
258 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
259 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
259 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
260 repo_group = All(v.CanWriteGroup(localizer, old_data),
260 repo_group = All(v.CanWriteGroup(localizer, old_data),
261 v.OneOf(repo_groups, hideList=True))
261 v.OneOf(repo_groups, hideList=True))
262 repo_type = v.OneOf(supported_backends, required=False,
262 repo_type = v.OneOf(supported_backends, required=False,
263 if_missing=old_data.get('repo_type'))
263 if_missing=old_data.get('repo_type'))
264 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
264 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
265 repo_private = v.StringBoolean(if_missing=False)
265 repo_private = v.StringBoolean(if_missing=False)
266 repo_landing_rev = v.OneOf(landing_revs, hideList=True)
266 repo_landing_rev = v.OneOf(landing_revs, hideList=True)
267 repo_copy_permissions = v.StringBoolean(if_missing=False)
267 repo_copy_permissions = v.StringBoolean(if_missing=False)
268 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
268 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
269
269
270 repo_enable_statistics = v.StringBoolean(if_missing=False)
270 repo_enable_statistics = v.StringBoolean(if_missing=False)
271 repo_enable_downloads = v.StringBoolean(if_missing=False)
271 repo_enable_downloads = v.StringBoolean(if_missing=False)
272 repo_enable_locking = v.StringBoolean(if_missing=False)
272 repo_enable_locking = v.StringBoolean(if_missing=False)
273
273
274 if edit:
274 if edit:
275 # this is repo owner
275 # this is repo owner
276 user = All(
276 user = All(
277 v.UnicodeString(not_empty=True),
277 v.UnicodeString(not_empty=True),
278 v.ValidRepoUser(localizer, allow_disabled))
278 v.ValidRepoUser(localizer, allow_disabled))
279 clone_uri_change = v.UnicodeString(
279 clone_uri_change = v.UnicodeString(
280 not_empty=False, if_missing=v.Missing)
280 not_empty=False, if_missing=v.Missing)
281
281
282 chained_validators = [v.ValidCloneUri(localizer),
282 chained_validators = [v.ValidCloneUri(localizer),
283 v.ValidRepoName(localizer, edit, old_data)]
283 v.ValidRepoName(localizer, edit, old_data)]
284 return _RepoForm
284 return _RepoForm
285
285
286
286
287 def RepoPermsForm(localizer):
287 def RepoPermsForm(localizer):
288 _ = localizer
288 _ = localizer
289
289
290 class _RepoPermsForm(formencode.Schema):
290 class _RepoPermsForm(formencode.Schema):
291 allow_extra_fields = True
291 allow_extra_fields = True
292 filter_extra_fields = False
292 filter_extra_fields = False
293 chained_validators = [v.ValidPerms(localizer, type_='repo')]
293 chained_validators = [v.ValidPerms(localizer, type_='repo')]
294 return _RepoPermsForm
294 return _RepoPermsForm
295
295
296
296
297 def RepoGroupPermsForm(localizer, valid_recursive_choices):
297 def RepoGroupPermsForm(localizer, valid_recursive_choices):
298 _ = localizer
298 _ = localizer
299
299
300 class _RepoGroupPermsForm(formencode.Schema):
300 class _RepoGroupPermsForm(formencode.Schema):
301 allow_extra_fields = True
301 allow_extra_fields = True
302 filter_extra_fields = False
302 filter_extra_fields = False
303 recursive = v.OneOf(valid_recursive_choices)
303 recursive = v.OneOf(valid_recursive_choices)
304 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
304 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
305 return _RepoGroupPermsForm
305 return _RepoGroupPermsForm
306
306
307
307
308 def UserGroupPermsForm(localizer):
308 def UserGroupPermsForm(localizer):
309 _ = localizer
309 _ = localizer
310
310
311 class _UserPermsForm(formencode.Schema):
311 class _UserPermsForm(formencode.Schema):
312 allow_extra_fields = True
312 allow_extra_fields = True
313 filter_extra_fields = False
313 filter_extra_fields = False
314 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
314 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
315 return _UserPermsForm
315 return _UserPermsForm
316
316
317
317
318 def RepoFieldForm(localizer):
318 def RepoFieldForm(localizer):
319 _ = localizer
319 _ = localizer
320
320
321 class _RepoFieldForm(formencode.Schema):
321 class _RepoFieldForm(formencode.Schema):
322 filter_extra_fields = True
322 filter_extra_fields = True
323 allow_extra_fields = True
323 allow_extra_fields = True
324
324
325 new_field_key = All(v.FieldKey(localizer),
325 new_field_key = All(v.FieldKey(localizer),
326 v.UnicodeString(strip=True, min=3, not_empty=True))
326 v.UnicodeString(strip=True, min=3, not_empty=True))
327 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
327 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
328 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
328 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
329 if_missing='str')
329 if_missing='str')
330 new_field_label = v.UnicodeString(not_empty=False)
330 new_field_label = v.UnicodeString(not_empty=False)
331 new_field_desc = v.UnicodeString(not_empty=False)
331 new_field_desc = v.UnicodeString(not_empty=False)
332 return _RepoFieldForm
332 return _RepoFieldForm
333
333
334
334
335 def RepoForkForm(localizer, edit=False, old_data=None,
335 def RepoForkForm(localizer, edit=False, old_data=None,
336 supported_backends=BACKENDS.keys(), repo_groups=None,
336 supported_backends=BACKENDS.keys(), repo_groups=None,
337 landing_revs=None):
337 landing_revs=None):
338 _ = localizer
338 _ = localizer
339 old_data = old_data or {}
339 old_data = old_data or {}
340 repo_groups = repo_groups or []
340 repo_groups = repo_groups or []
341 landing_revs = landing_revs or []
341 landing_revs = landing_revs or []
342
342
343 class _RepoForkForm(formencode.Schema):
343 class _RepoForkForm(formencode.Schema):
344 allow_extra_fields = True
344 allow_extra_fields = True
345 filter_extra_fields = False
345 filter_extra_fields = False
346 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
346 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
347 v.SlugifyName(localizer))
347 v.SlugifyName(localizer))
348 repo_group = All(v.CanWriteGroup(localizer, ),
348 repo_group = All(v.CanWriteGroup(localizer, ),
349 v.OneOf(repo_groups, hideList=True))
349 v.OneOf(repo_groups, hideList=True))
350 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
350 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
351 description = v.UnicodeString(strip=True, min=1, not_empty=True)
351 description = v.UnicodeString(strip=True, min=1, not_empty=True)
352 private = v.StringBoolean(if_missing=False)
352 private = v.StringBoolean(if_missing=False)
353 copy_permissions = v.StringBoolean(if_missing=False)
353 copy_permissions = v.StringBoolean(if_missing=False)
354 fork_parent_id = v.UnicodeString()
354 fork_parent_id = v.UnicodeString()
355 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
355 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
356 landing_rev = v.OneOf(landing_revs, hideList=True)
356 landing_rev = v.OneOf(landing_revs, hideList=True)
357 return _RepoForkForm
357 return _RepoForkForm
358
358
359
359
360 def ApplicationSettingsForm(localizer):
360 def ApplicationSettingsForm(localizer):
361 _ = localizer
361 _ = localizer
362
362
363 class _ApplicationSettingsForm(formencode.Schema):
363 class _ApplicationSettingsForm(formencode.Schema):
364 allow_extra_fields = True
364 allow_extra_fields = True
365 filter_extra_fields = False
365 filter_extra_fields = False
366 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
366 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
367 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
367 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
368 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
368 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
369 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
369 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
370 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
370 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
371 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
371 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
372 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
372 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
373 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
373 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
374 return _ApplicationSettingsForm
374 return _ApplicationSettingsForm
375
375
376
376
377 def ApplicationVisualisationForm(localizer):
377 def ApplicationVisualisationForm(localizer):
378 from rhodecode.model.db import Repository
378 from rhodecode.model.db import Repository
379 _ = localizer
379 _ = localizer
380
380
381 class _ApplicationVisualisationForm(formencode.Schema):
381 class _ApplicationVisualisationForm(formencode.Schema):
382 allow_extra_fields = True
382 allow_extra_fields = True
383 filter_extra_fields = False
383 filter_extra_fields = False
384 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
384 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
385 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
385 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
386 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
386 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
387
387
388 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
388 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
389 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
389 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
390 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
390 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
391 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
391 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
392 rhodecode_show_version = v.StringBoolean(if_missing=False)
392 rhodecode_show_version = v.StringBoolean(if_missing=False)
393 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
393 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
394 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
394 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
395 rhodecode_gravatar_url = v.UnicodeString(min=3)
395 rhodecode_gravatar_url = v.UnicodeString(min=3)
396 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
396 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
397 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
397 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
398 rhodecode_support_url = v.UnicodeString()
398 rhodecode_support_url = v.UnicodeString()
399 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
399 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
400 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
400 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
401 return _ApplicationVisualisationForm
401 return _ApplicationVisualisationForm
402
402
403
403
404 class _BaseVcsSettingsForm(formencode.Schema):
404 class _BaseVcsSettingsForm(formencode.Schema):
405
405
406 allow_extra_fields = True
406 allow_extra_fields = True
407 filter_extra_fields = False
407 filter_extra_fields = False
408 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
408 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
409 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
409 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
410 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
410 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
411
411
412 # PR/Code-review
412 # PR/Code-review
413 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
413 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
414 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
414 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
415
415
416 # hg
416 # hg
417 extensions_largefiles = v.StringBoolean(if_missing=False)
417 extensions_largefiles = v.StringBoolean(if_missing=False)
418 extensions_evolve = v.StringBoolean(if_missing=False)
418 extensions_evolve = v.StringBoolean(if_missing=False)
419 phases_publish = v.StringBoolean(if_missing=False)
419 phases_publish = v.StringBoolean(if_missing=False)
420
420
421 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
421 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
422 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
422 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
423
423
424 # git
424 # git
425 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
425 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
426 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
426 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
427 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
427 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
428
428
429 # svn
429 # svn
430 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
430 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
431 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
431 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
432
432
433 # cache
433 # cache
434 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
434 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
435
435
436
436
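_BaseVcsSettingsForm is reused by subclassing it in the form factories below; a hedged sketch of the same inheritance pattern with plain formencode (field names illustrative):

import formencode
from formencode import validators

class _BaseSettings(formencode.Schema):
    allow_extra_fields = True
    filter_extra_fields = False
    hooks_push_logger = validators.StringBool(if_missing=False)

class _RepoSettings(_BaseSettings):
    # inherits the base fields and adds repo-scoped ones
    inherit_global_settings = validators.StringBool(if_missing=False)

print(_RepoSettings().to_python({'inherit_global_settings': 'true'}))
# both the inherited and the new field appear in the validated result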
437 def ApplicationUiSettingsForm(localizer):
437 def ApplicationUiSettingsForm(localizer):
438 _ = localizer
438 _ = localizer
439
439
440 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
440 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
441 web_push_ssl = v.StringBoolean(if_missing=False)
441 web_push_ssl = v.StringBoolean(if_missing=False)
442 paths_root_path = All(
442 paths_root_path = All(
443 v.ValidPath(localizer),
443 v.ValidPath(localizer),
444 v.UnicodeString(strip=True, min=1, not_empty=True)
444 v.UnicodeString(strip=True, min=1, not_empty=True)
445 )
445 )
446 largefiles_usercache = All(
446 largefiles_usercache = All(
447 v.ValidPath(localizer),
447 v.ValidPath(localizer),
448 v.UnicodeString(strip=True, min=2, not_empty=True))
448 v.UnicodeString(strip=True, min=2, not_empty=True))
449 vcs_git_lfs_store_location = All(
449 vcs_git_lfs_store_location = All(
450 v.ValidPath(localizer),
450 v.ValidPath(localizer),
451 v.UnicodeString(strip=True, min=2, not_empty=True))
451 v.UnicodeString(strip=True, min=2, not_empty=True))
452 extensions_hgsubversion = v.StringBoolean(if_missing=False)
452 extensions_hgsubversion = v.StringBoolean(if_missing=False)
453 extensions_hggit = v.StringBoolean(if_missing=False)
453 extensions_hggit = v.StringBoolean(if_missing=False)
454 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
454 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
455 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
455 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
456 return _ApplicationUiSettingsForm
456 return _ApplicationUiSettingsForm
457
457
458
458
459 def RepoVcsSettingsForm(localizer, repo_name):
459 def RepoVcsSettingsForm(localizer, repo_name):
460 _ = localizer
460 _ = localizer
461
461
462 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
462 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
463 inherit_global_settings = v.StringBoolean(if_missing=False)
463 inherit_global_settings = v.StringBoolean(if_missing=False)
464 new_svn_branch = v.ValidSvnPattern(localizer,
464 new_svn_branch = v.ValidSvnPattern(localizer,
465 section='vcs_svn_branch', repo_name=repo_name)
465 section='vcs_svn_branch', repo_name=repo_name)
466 new_svn_tag = v.ValidSvnPattern(localizer,
466 new_svn_tag = v.ValidSvnPattern(localizer,
467 section='vcs_svn_tag', repo_name=repo_name)
467 section='vcs_svn_tag', repo_name=repo_name)
468 return _RepoVcsSettingsForm
468 return _RepoVcsSettingsForm
469
469
470
470
471 def LabsSettingsForm(localizer):
471 def LabsSettingsForm(localizer):
472 _ = localizer
472 _ = localizer
473
473
474 class _LabSettingsForm(formencode.Schema):
474 class _LabSettingsForm(formencode.Schema):
475 allow_extra_fields = True
475 allow_extra_fields = True
476 filter_extra_fields = False
476 filter_extra_fields = False
477 return _LabSettingsForm
477 return _LabSettingsForm
478
478
479
479
480 def ApplicationPermissionsForm(
480 def ApplicationPermissionsForm(
481 localizer, register_choices, password_reset_choices,
481 localizer, register_choices, password_reset_choices,
482 extern_activate_choices):
482 extern_activate_choices):
483 _ = localizer
483 _ = localizer
484
484
485 class _DefaultPermissionsForm(formencode.Schema):
485 class _DefaultPermissionsForm(formencode.Schema):
486 allow_extra_fields = True
486 allow_extra_fields = True
487 filter_extra_fields = True
487 filter_extra_fields = True
488
488
489 anonymous = v.StringBoolean(if_missing=False)
489 anonymous = v.StringBoolean(if_missing=False)
490 default_register = v.OneOf(register_choices)
490 default_register = v.OneOf(register_choices)
491 default_register_message = v.UnicodeString()
491 default_register_message = v.UnicodeString()
492 default_password_reset = v.OneOf(password_reset_choices)
492 default_password_reset = v.OneOf(password_reset_choices)
493 default_extern_activate = v.OneOf(extern_activate_choices)
493 default_extern_activate = v.OneOf(extern_activate_choices)
494 return _DefaultPermissionsForm
494 return _DefaultPermissionsForm
495
495
496
496
497 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
497 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
498 user_group_perms_choices):
498 user_group_perms_choices):
499 _ = localizer
499 _ = localizer
500
500
501 class _ObjectPermissionsForm(formencode.Schema):
501 class _ObjectPermissionsForm(formencode.Schema):
502 allow_extra_fields = True
502 allow_extra_fields = True
503 filter_extra_fields = True
503 filter_extra_fields = True
504 overwrite_default_repo = v.StringBoolean(if_missing=False)
504 overwrite_default_repo = v.StringBoolean(if_missing=False)
505 overwrite_default_group = v.StringBoolean(if_missing=False)
505 overwrite_default_group = v.StringBoolean(if_missing=False)
506 overwrite_default_user_group = v.StringBoolean(if_missing=False)
506 overwrite_default_user_group = v.StringBoolean(if_missing=False)
507 default_repo_perm = v.OneOf(repo_perms_choices)
507 default_repo_perm = v.OneOf(repo_perms_choices)
508 default_group_perm = v.OneOf(group_perms_choices)
508 default_group_perm = v.OneOf(group_perms_choices)
509 default_user_group_perm = v.OneOf(user_group_perms_choices)
509 default_user_group_perm = v.OneOf(user_group_perms_choices)
510 return _ObjectPermissionsForm
510 return _ObjectPermissionsForm
511
511
512
512
513 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
513 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
514 repo_group_create_choices, user_group_create_choices,
514 repo_group_create_choices, user_group_create_choices,
515 fork_choices, inherit_default_permissions_choices):
515 fork_choices, inherit_default_permissions_choices):
516 _ = localizer
516 _ = localizer
517
517
518 class _DefaultPermissionsForm(formencode.Schema):
518 class _DefaultPermissionsForm(formencode.Schema):
519 allow_extra_fields = True
519 allow_extra_fields = True
520 filter_extra_fields = True
520 filter_extra_fields = True
521
521
522 anonymous = v.StringBoolean(if_missing=False)
522 anonymous = v.StringBoolean(if_missing=False)
523
523
524 default_repo_create = v.OneOf(create_choices)
524 default_repo_create = v.OneOf(create_choices)
525 default_repo_create_on_write = v.OneOf(create_on_write_choices)
525 default_repo_create_on_write = v.OneOf(create_on_write_choices)
526 default_user_group_create = v.OneOf(user_group_create_choices)
526 default_user_group_create = v.OneOf(user_group_create_choices)
527 default_repo_group_create = v.OneOf(repo_group_create_choices)
527 default_repo_group_create = v.OneOf(repo_group_create_choices)
528 default_fork_create = v.OneOf(fork_choices)
528 default_fork_create = v.OneOf(fork_choices)
529 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
529 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
530 return _DefaultPermissionsForm
530 return _DefaultPermissionsForm
531
531
532
532
533 def UserIndividualPermissionsForm(localizer):
533 def UserIndividualPermissionsForm(localizer):
534 _ = localizer
534 _ = localizer
535
535
536 class _DefaultPermissionsForm(formencode.Schema):
536 class _DefaultPermissionsForm(formencode.Schema):
537 allow_extra_fields = True
537 allow_extra_fields = True
538 filter_extra_fields = True
538 filter_extra_fields = True
539
539
540 inherit_default_permissions = v.StringBoolean(if_missing=False)
540 inherit_default_permissions = v.StringBoolean(if_missing=False)
541 return _DefaultPermissionsForm
541 return _DefaultPermissionsForm
542
542
543
543
544 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
544 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
545 _ = localizer
545 _ = localizer
546 old_data = old_data or {}
546 old_data = old_data or {}
547
547
548 class _DefaultsForm(formencode.Schema):
548 class _DefaultsForm(formencode.Schema):
549 allow_extra_fields = True
549 allow_extra_fields = True
550 filter_extra_fields = True
550 filter_extra_fields = True
551 default_repo_type = v.OneOf(supported_backends)
551 default_repo_type = v.OneOf(supported_backends)
552 default_repo_private = v.StringBoolean(if_missing=False)
552 default_repo_private = v.StringBoolean(if_missing=False)
553 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
553 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
554 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
554 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
555 default_repo_enable_locking = v.StringBoolean(if_missing=False)
555 default_repo_enable_locking = v.StringBoolean(if_missing=False)
556 return _DefaultsForm
556 return _DefaultsForm
557
557
558
558
559 def AuthSettingsForm(localizer):
559 def AuthSettingsForm(localizer):
560 _ = localizer
560 _ = localizer
561
561
562 class _AuthSettingsForm(formencode.Schema):
562 class _AuthSettingsForm(formencode.Schema):
563 allow_extra_fields = True
563 allow_extra_fields = True
564 filter_extra_fields = True
564 filter_extra_fields = True
565 auth_plugins = All(v.ValidAuthPlugins(localizer),
565 auth_plugins = All(v.ValidAuthPlugins(localizer),
566 v.UniqueListFromString(localizer)(not_empty=True))
566 v.UniqueListFromString(localizer)(not_empty=True))
567 return _AuthSettingsForm
567 return _AuthSettingsForm
568
568
569
569
570 def UserExtraEmailForm(localizer):
570 def UserExtraEmailForm(localizer):
571 _ = localizer
571 _ = localizer
572
572
573 class _UserExtraEmailForm(formencode.Schema):
573 class _UserExtraEmailForm(formencode.Schema):
574 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
574 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
575 return _UserExtraEmailForm
575 return _UserExtraEmailForm
576
576
577
577
578 def UserExtraIpForm(localizer):
578 def UserExtraIpForm(localizer):
579 _ = localizer
579 _ = localizer
580
580
581 class _UserExtraIpForm(formencode.Schema):
581 class _UserExtraIpForm(formencode.Schema):
582 ip = v.ValidIp(localizer)(not_empty=True)
582 ip = v.ValidIp(localizer)(not_empty=True)
583 return _UserExtraIpForm
583 return _UserExtraIpForm
584
584
585
585
586 def PullRequestForm(localizer, repo_id):
586 def PullRequestForm(localizer, repo_id):
587 _ = localizer
587 _ = localizer
588
588
589 class ReviewerForm(formencode.Schema):
589 class ReviewerForm(formencode.Schema):
590 user_id = v.Int(not_empty=True)
590 user_id = v.Int(not_empty=True)
591 reasons = All()
591 reasons = All()
592 rules = All(v.UniqueList(localizer, convert=int)())
592 rules = All(v.UniqueList(localizer, convert=int)())
593 mandatory = v.StringBoolean()
593 mandatory = v.StringBoolean()
594
594
595 class _PullRequestForm(formencode.Schema):
595 class _PullRequestForm(formencode.Schema):
596 allow_extra_fields = True
596 allow_extra_fields = True
597 filter_extra_fields = True
597 filter_extra_fields = True
598
598
599 common_ancestor = v.UnicodeString(strip=True, required=True)
599 common_ancestor = v.UnicodeString(strip=True, required=True)
600 source_repo = v.UnicodeString(strip=True, required=True)
600 source_repo = v.UnicodeString(strip=True, required=True)
601 source_ref = v.UnicodeString(strip=True, required=True)
601 source_ref = v.UnicodeString(strip=True, required=True)
602 target_repo = v.UnicodeString(strip=True, required=True)
602 target_repo = v.UnicodeString(strip=True, required=True)
603 target_ref = v.UnicodeString(strip=True, required=True)
603 target_ref = v.UnicodeString(strip=True, required=True)
604 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
604 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
605 v.UniqueList(localizer)(not_empty=True))
605 v.UniqueList(localizer)(not_empty=True))
606 review_members = formencode.ForEach(ReviewerForm())
606 review_members = formencode.ForEach(ReviewerForm())
607 pullrequest_title = v.UnicodeString(strip=True, required=True, min=3, max=255)
607 pullrequest_title = v.UnicodeString(strip=True, required=True, min=3, max=255)
608 pullrequest_desc = v.UnicodeString(strip=True, required=False)
608 pullrequest_desc = v.UnicodeString(strip=True, required=False)
609 description_renderer = v.UnicodeString(strip=True, required=False)
609
610
610 return _PullRequestForm
611 return _PullRequestForm
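The pull request form above now carries an optional description_renderer field next to the description itself. Below is a minimal, hedged sketch of how such a formencode schema could be exercised; the schema is a simplified stand-in for _PullRequestForm and all payload values are hypothetical.

    import formencode
    from formencode import validators as v

    class _PullRequestFormSketch(formencode.Schema):
        # simplified stand-in for the _PullRequestForm defined above
        allow_extra_fields = True
        filter_extra_fields = True
        pullrequest_title = v.UnicodeString(strip=True, not_empty=True, min=3, max=255)
        pullrequest_desc = v.UnicodeString(strip=True, if_missing=u'')
        description_renderer = v.UnicodeString(strip=True, if_missing=u'')

    data = _PullRequestFormSketch().to_python({
        'pullrequest_title': u'Store the description renderer',
        'pullrequest_desc': u'Keep the renderer choice with the pull request',
        'description_renderer': u'markdown',  # hypothetical renderer name
    })
    # data['description_renderer'] is then available to the model's create() call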
611
612
612
613
613 def IssueTrackerPatternsForm(localizer):
614 def IssueTrackerPatternsForm(localizer):
614 _ = localizer
615 _ = localizer
615
616
616 class _IssueTrackerPatternsForm(formencode.Schema):
617 class _IssueTrackerPatternsForm(formencode.Schema):
617 allow_extra_fields = True
618 allow_extra_fields = True
618 filter_extra_fields = False
619 filter_extra_fields = False
619 chained_validators = [v.ValidPattern(localizer)]
620 chained_validators = [v.ValidPattern(localizer)]
620 return _IssueTrackerPatternsForm
621 return _IssueTrackerPatternsForm
@@ -1,1701 +1,1704 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2018 RhodeCode GmbH
3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid.threadlocal import get_current_request
33 from pyramid.threadlocal import get_current_request
34
34
35 from rhodecode import events
35 from rhodecode import events
36 from rhodecode.translation import lazy_ugettext#, _
36 from rhodecode.translation import lazy_ugettext#, _
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import audit_logger
38 from rhodecode.lib import audit_logger
39 from rhodecode.lib.compat import OrderedDict
39 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.markup_renderer import (
41 from rhodecode.lib.markup_renderer import (
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.vcs.backends.base import (
44 from rhodecode.lib.vcs.backends.base import (
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.exceptions import (
47 from rhodecode.lib.vcs.exceptions import (
48 CommitDoesNotExistError, EmptyRepositoryError)
48 CommitDoesNotExistError, EmptyRepositoryError)
49 from rhodecode.model import BaseModel
49 from rhodecode.model import BaseModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.comment import CommentsModel
51 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.db import (
52 from rhodecode.model.db import (
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56 from rhodecode.model.notification import NotificationModel, \
56 from rhodecode.model.notification import NotificationModel, \
57 EmailNotificationModel
57 EmailNotificationModel
58 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.settings import VcsSettingsModel
59 from rhodecode.model.settings import VcsSettingsModel
60
60
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64
64
65 # Data structure to hold the response data when updating commits during a pull
65 # Data structure to hold the response data when updating commits during a pull
66 # request update.
66 # request update.
67 UpdateResponse = collections.namedtuple('UpdateResponse', [
67 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 'executed', 'reason', 'new', 'old', 'changes',
68 'executed', 'reason', 'new', 'old', 'changes',
69 'source_changed', 'target_changed'])
69 'source_changed', 'target_changed'])
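UpdateResponse is a plain namedtuple, so an update result can be built and inspected directly; a small illustrative sketch with hypothetical field values follows.

    import collections

    UpdateResponse = collections.namedtuple('UpdateResponse', [
        'executed', 'reason', 'new', 'old', 'changes',
        'source_changed', 'target_changed'])

    # the real code passes an UpdateFailureReason constant as `reason`;
    # a plain string is used here only for illustration
    resp = UpdateResponse(
        executed=False, reason='no_change', new=None, old=None,
        changes=None, source_changed=False, target_changed=False)

    if not resp.executed:
        print('pull request update skipped: %s' % resp.reason)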
70
70
71
71
72 class PullRequestModel(BaseModel):
72 class PullRequestModel(BaseModel):
73
73
74 cls = PullRequest
74 cls = PullRequest
75
75
76 DIFF_CONTEXT = 3
76 DIFF_CONTEXT = 3
77
77
78 MERGE_STATUS_MESSAGES = {
78 MERGE_STATUS_MESSAGES = {
79 MergeFailureReason.NONE: lazy_ugettext(
79 MergeFailureReason.NONE: lazy_ugettext(
80 'This pull request can be automatically merged.'),
80 'This pull request can be automatically merged.'),
81 MergeFailureReason.UNKNOWN: lazy_ugettext(
81 MergeFailureReason.UNKNOWN: lazy_ugettext(
82 'This pull request cannot be merged because of an unhandled'
82 'This pull request cannot be merged because of an unhandled'
83 ' exception.'),
83 ' exception.'),
84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
85 'This pull request cannot be merged because of merge conflicts.'),
85 'This pull request cannot be merged because of merge conflicts.'),
86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
87 'This pull request could not be merged because push to target'
87 'This pull request could not be merged because push to target'
88 ' failed.'),
88 ' failed.'),
89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
90 'This pull request cannot be merged because the target is not a'
90 'This pull request cannot be merged because the target is not a'
91 ' head.'),
91 ' head.'),
92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
93 'This pull request cannot be merged because the source contains'
93 'This pull request cannot be merged because the source contains'
94 ' more branches than the target.'),
94 ' more branches than the target.'),
95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
96 'This pull request cannot be merged because the target has'
96 'This pull request cannot be merged because the target has'
97 ' multiple heads.'),
97 ' multiple heads.'),
98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
99 'This pull request cannot be merged because the target repository'
99 'This pull request cannot be merged because the target repository'
100 ' is locked.'),
100 ' is locked.'),
101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
102 'This pull request cannot be merged because the target or the '
102 'This pull request cannot be merged because the target or the '
103 'source reference is missing.'),
103 'source reference is missing.'),
104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
105 'This pull request cannot be merged because the target '
105 'This pull request cannot be merged because the target '
106 'reference is missing.'),
106 'reference is missing.'),
107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
108 'This pull request cannot be merged because the source '
108 'This pull request cannot be merged because the source '
109 'reference is missing.'),
109 'reference is missing.'),
110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
111 'This pull request cannot be merged because of conflicts related '
111 'This pull request cannot be merged because of conflicts related '
112 'to sub repositories.'),
112 'to sub repositories.'),
113 }
113 }
114
114
115 UPDATE_STATUS_MESSAGES = {
115 UPDATE_STATUS_MESSAGES = {
116 UpdateFailureReason.NONE: lazy_ugettext(
116 UpdateFailureReason.NONE: lazy_ugettext(
117 'Pull request update successful.'),
117 'Pull request update successful.'),
118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
119 'Pull request update failed because of an unknown error.'),
119 'Pull request update failed because of an unknown error.'),
120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
121 'No update needed because the source and target have not changed.'),
121 'No update needed because the source and target have not changed.'),
122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
123 'Pull request cannot be updated because the reference type is '
123 'Pull request cannot be updated because the reference type is '
124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
126 'This pull request cannot be updated because the target '
126 'This pull request cannot be updated because the target '
127 'reference is missing.'),
127 'reference is missing.'),
128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
129 'This pull request cannot be updated because the source '
129 'This pull request cannot be updated because the source '
130 'reference is missing.'),
130 'reference is missing.'),
131 }
131 }
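Both lookup tables map failure-reason constants to translatable, user-facing messages. A standalone sketch of the lookup pattern follows; it uses plain strings instead of UpdateFailureReason and lazy_ugettext purely for illustration.

    # plain-string stand-in for the UPDATE_STATUS_MESSAGES table above
    UPDATE_STATUS_MESSAGES = {
        'none': 'Pull request update successful.',
        'no_change': 'No update needed because the source and target have not changed.',
    }

    def update_status_message(reason):
        # fall back to a generic message for reasons without a dedicated entry
        return UPDATE_STATUS_MESSAGES.get(reason, 'Pull request update failed.')

    assert update_status_message('no_change').startswith('No update needed')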
132
132
133 def __get_pull_request(self, pull_request):
133 def __get_pull_request(self, pull_request):
134 return self._get_instance((
134 return self._get_instance((
135 PullRequest, PullRequestVersion), pull_request)
135 PullRequest, PullRequestVersion), pull_request)
136
136
137 def _check_perms(self, perms, pull_request, user, api=False):
137 def _check_perms(self, perms, pull_request, user, api=False):
138 if not api:
138 if not api:
139 return h.HasRepoPermissionAny(*perms)(
139 return h.HasRepoPermissionAny(*perms)(
140 user=user, repo_name=pull_request.target_repo.repo_name)
140 user=user, repo_name=pull_request.target_repo.repo_name)
141 else:
141 else:
142 return h.HasRepoPermissionAnyApi(*perms)(
142 return h.HasRepoPermissionAnyApi(*perms)(
143 user=user, repo_name=pull_request.target_repo.repo_name)
143 user=user, repo_name=pull_request.target_repo.repo_name)
144
144
145 def check_user_read(self, pull_request, user, api=False):
145 def check_user_read(self, pull_request, user, api=False):
146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 return self._check_perms(_perms, pull_request, user, api)
147 return self._check_perms(_perms, pull_request, user, api)
148
148
149 def check_user_merge(self, pull_request, user, api=False):
149 def check_user_merge(self, pull_request, user, api=False):
150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 return self._check_perms(_perms, pull_request, user, api)
151 return self._check_perms(_perms, pull_request, user, api)
152
152
153 def check_user_update(self, pull_request, user, api=False):
153 def check_user_update(self, pull_request, user, api=False):
154 owner = user.user_id == pull_request.user_id
154 owner = user.user_id == pull_request.user_id
155 return self.check_user_merge(pull_request, user, api) or owner
155 return self.check_user_merge(pull_request, user, api) or owner
156
156
157 def check_user_delete(self, pull_request, user):
157 def check_user_delete(self, pull_request, user):
158 owner = user.user_id == pull_request.user_id
158 owner = user.user_id == pull_request.user_id
159 _perms = ('repository.admin',)
159 _perms = ('repository.admin',)
160 return self._check_perms(_perms, pull_request, user) or owner
160 return self._check_perms(_perms, pull_request, user) or owner
161
161
162 def check_user_change_status(self, pull_request, user, api=False):
162 def check_user_change_status(self, pull_request, user, api=False):
163 reviewer = user.user_id in [x.user_id for x in
163 reviewer = user.user_id in [x.user_id for x in
164 pull_request.reviewers]
164 pull_request.reviewers]
165 return self.check_user_update(pull_request, user, api) or reviewer
165 return self.check_user_update(pull_request, user, api) or reviewer
166
166
167 def check_user_comment(self, pull_request, user):
167 def check_user_comment(self, pull_request, user):
168 owner = user.user_id == pull_request.user_id
168 owner = user.user_id == pull_request.user_id
169 return self.check_user_read(pull_request, user) or owner
169 return self.check_user_read(pull_request, user) or owner
170
170
171 def get(self, pull_request):
171 def get(self, pull_request):
172 return self.__get_pull_request(pull_request)
172 return self.__get_pull_request(pull_request)
173
173
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 opened_by=None, order_by=None,
175 opened_by=None, order_by=None,
176 order_dir='desc'):
176 order_dir='desc'):
177 repo = None
177 repo = None
178 if repo_name:
178 if repo_name:
179 repo = self._get_repo(repo_name)
179 repo = self._get_repo(repo_name)
180
180
181 q = PullRequest.query()
181 q = PullRequest.query()
182
182
183 # source or target
183 # source or target
184 if repo and source:
184 if repo and source:
185 q = q.filter(PullRequest.source_repo == repo)
185 q = q.filter(PullRequest.source_repo == repo)
186 elif repo:
186 elif repo:
187 q = q.filter(PullRequest.target_repo == repo)
187 q = q.filter(PullRequest.target_repo == repo)
188
188
189 # closed,opened
189 # closed,opened
190 if statuses:
190 if statuses:
191 q = q.filter(PullRequest.status.in_(statuses))
191 q = q.filter(PullRequest.status.in_(statuses))
192
192
193 # opened by filter
193 # opened by filter
194 if opened_by:
194 if opened_by:
195 q = q.filter(PullRequest.user_id.in_(opened_by))
195 q = q.filter(PullRequest.user_id.in_(opened_by))
196
196
197 if order_by:
197 if order_by:
198 order_map = {
198 order_map = {
199 'name_raw': PullRequest.pull_request_id,
199 'name_raw': PullRequest.pull_request_id,
200 'title': PullRequest.title,
200 'title': PullRequest.title,
201 'updated_on_raw': PullRequest.updated_on,
201 'updated_on_raw': PullRequest.updated_on,
202 'target_repo': PullRequest.target_repo_id
202 'target_repo': PullRequest.target_repo_id
203 }
203 }
204 if order_dir == 'asc':
204 if order_dir == 'asc':
205 q = q.order_by(order_map[order_by].asc())
205 q = q.order_by(order_map[order_by].asc())
206 else:
206 else:
207 q = q.order_by(order_map[order_by].desc())
207 q = q.order_by(order_map[order_by].desc())
208
208
209 return q
209 return q
210
210
211 def count_all(self, repo_name, source=False, statuses=None,
211 def count_all(self, repo_name, source=False, statuses=None,
212 opened_by=None):
212 opened_by=None):
213 """
213 """
214 Count the number of pull requests for a specific repository.
214 Count the number of pull requests for a specific repository.
215
215
216 :param repo_name: target or source repo
216 :param repo_name: target or source repo
217 :param source: boolean flag to specify if repo_name refers to source
217 :param source: boolean flag to specify if repo_name refers to source
218 :param statuses: list of pull request statuses
218 :param statuses: list of pull request statuses
219 :param opened_by: author user of the pull request
219 :param opened_by: author user of the pull request
220 :returns: int number of pull requests
220 :returns: int number of pull requests
221 """
221 """
222 q = self._prepare_get_all_query(
222 q = self._prepare_get_all_query(
223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224
224
225 return q.count()
225 return q.count()
226
226
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 offset=0, length=None, order_by=None, order_dir='desc'):
228 offset=0, length=None, order_by=None, order_dir='desc'):
229 """
229 """
230 Get all pull requests for a specific repository.
230 Get all pull requests for a specific repository.
231
231
232 :param repo_name: target or source repo
232 :param repo_name: target or source repo
233 :param source: boolean flag to specify if repo_name refers to source
233 :param source: boolean flag to specify if repo_name refers to source
234 :param statuses: list of pull request statuses
234 :param statuses: list of pull request statuses
235 :param opened_by: author user of the pull request
235 :param opened_by: author user of the pull request
236 :param offset: pagination offset
236 :param offset: pagination offset
237 :param length: length of returned list
237 :param length: length of returned list
238 :param order_by: order of the returned list
238 :param order_by: order of the returned list
239 :param order_dir: 'asc' or 'desc' ordering direction
239 :param order_dir: 'asc' or 'desc' ordering direction
240 :returns: list of pull requests
240 :returns: list of pull requests
241 """
241 """
242 q = self._prepare_get_all_query(
242 q = self._prepare_get_all_query(
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 order_by=order_by, order_dir=order_dir)
244 order_by=order_by, order_dir=order_dir)
245
245
246 if length:
246 if length:
247 pull_requests = q.limit(length).offset(offset).all()
247 pull_requests = q.limit(length).offset(offset).all()
248 else:
248 else:
249 pull_requests = q.all()
249 pull_requests = q.all()
250
250
251 return pull_requests
251 return pull_requests
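count_all() and get_all() take the same filter arguments, with get_all() adding pagination and ordering. A hedged usage sketch follows; the repository name, status values and page size are hypothetical, and a configured RhodeCode session is assumed.

    from rhodecode.model.pull_request import PullRequestModel

    model = PullRequestModel()
    total = model.count_all('some/repo', statuses=['new', 'open'])
    page = model.get_all(
        'some/repo', statuses=['new', 'open'],
        offset=0, length=20, order_by='updated_on_raw', order_dir='desc')
    # total is an int, page is a list of at most 20 PullRequest objects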
252
252
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 opened_by=None):
254 opened_by=None):
255 """
255 """
256 Count the number of pull requests for a specific repository that are
256 Count the number of pull requests for a specific repository that are
257 awaiting review.
257 awaiting review.
258
258
259 :param repo_name: target or source repo
259 :param repo_name: target or source repo
260 :param source: boolean flag to specify if repo_name refers to source
260 :param source: boolean flag to specify if repo_name refers to source
261 :param statuses: list of pull request statuses
261 :param statuses: list of pull request statuses
262 :param opened_by: author user of the pull request
262 :param opened_by: author user of the pull request
263 :returns: int number of pull requests
263 :returns: int number of pull requests
264 """
264 """
265 pull_requests = self.get_awaiting_review(
265 pull_requests = self.get_awaiting_review(
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267
267
268 return len(pull_requests)
268 return len(pull_requests)
269
269
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 opened_by=None, offset=0, length=None,
271 opened_by=None, offset=0, length=None,
272 order_by=None, order_dir='desc'):
272 order_by=None, order_dir='desc'):
273 """
273 """
274 Get all pull requests for a specific repository that are awaiting
274 Get all pull requests for a specific repository that are awaiting
275 review.
275 review.
276
276
277 :param repo_name: target or source repo
277 :param repo_name: target or source repo
278 :param source: boolean flag to specify if repo_name refers to source
278 :param source: boolean flag to specify if repo_name refers to source
279 :param statuses: list of pull request statuses
279 :param statuses: list of pull request statuses
280 :param opened_by: author user of the pull request
280 :param opened_by: author user of the pull request
281 :param offset: pagination offset
281 :param offset: pagination offset
282 :param length: length of returned list
282 :param length: length of returned list
283 :param order_by: order of the returned list
283 :param order_by: order of the returned list
284 :param order_dir: 'asc' or 'desc' ordering direction
284 :param order_dir: 'asc' or 'desc' ordering direction
285 :returns: list of pull requests
285 :returns: list of pull requests
286 """
286 """
287 pull_requests = self.get_all(
287 pull_requests = self.get_all(
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 order_by=order_by, order_dir=order_dir)
289 order_by=order_by, order_dir=order_dir)
290
290
291 _filtered_pull_requests = []
291 _filtered_pull_requests = []
292 for pr in pull_requests:
292 for pr in pull_requests:
293 status = pr.calculated_review_status()
293 status = pr.calculated_review_status()
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 _filtered_pull_requests.append(pr)
296 _filtered_pull_requests.append(pr)
297 if length:
297 if length:
298 return _filtered_pull_requests[offset:offset+length]
298 return _filtered_pull_requests[offset:offset+length]
299 else:
299 else:
300 return _filtered_pull_requests
300 return _filtered_pull_requests
301
301
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 opened_by=None, user_id=None):
303 opened_by=None, user_id=None):
304 """
304 """
305 Count the number of pull requests for a specific repository that are
305 Count the number of pull requests for a specific repository that are
306 awaiting review from a specific user.
306 awaiting review from a specific user.
307
307
308 :param repo_name: target or source repo
308 :param repo_name: target or source repo
309 :param source: boolean flag to specify if repo_name refers to source
309 :param source: boolean flag to specify if repo_name refers to source
310 :param statuses: list of pull request statuses
310 :param statuses: list of pull request statuses
311 :param opened_by: author user of the pull request
311 :param opened_by: author user of the pull request
312 :param user_id: reviewer user of the pull request
312 :param user_id: reviewer user of the pull request
313 :returns: int number of pull requests
313 :returns: int number of pull requests
314 """
314 """
315 pull_requests = self.get_awaiting_my_review(
315 pull_requests = self.get_awaiting_my_review(
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 user_id=user_id)
317 user_id=user_id)
318
318
319 return len(pull_requests)
319 return len(pull_requests)
320
320
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 opened_by=None, user_id=None, offset=0,
322 opened_by=None, user_id=None, offset=0,
323 length=None, order_by=None, order_dir='desc'):
323 length=None, order_by=None, order_dir='desc'):
324 """
324 """
325 Get all pull requests for a specific repository that are awaiting
325 Get all pull requests for a specific repository that are awaiting
326 review from a specific user.
326 review from a specific user.
327
327
328 :param repo_name: target or source repo
328 :param repo_name: target or source repo
329 :param source: boolean flag to specify if repo_name refers to source
329 :param source: boolean flag to specify if repo_name refers to source
330 :param statuses: list of pull request statuses
330 :param statuses: list of pull request statuses
331 :param opened_by: author user of the pull request
331 :param opened_by: author user of the pull request
332 :param user_id: reviewer user of the pull request
332 :param user_id: reviewer user of the pull request
333 :param offset: pagination offset
333 :param offset: pagination offset
334 :param length: length of returned list
334 :param length: length of returned list
335 :param order_by: order of the returned list
335 :param order_by: order of the returned list
336 :param order_dir: 'asc' or 'desc' ordering direction
336 :param order_dir: 'asc' or 'desc' ordering direction
337 :returns: list of pull requests
337 :returns: list of pull requests
338 """
338 """
339 pull_requests = self.get_all(
339 pull_requests = self.get_all(
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 order_by=order_by, order_dir=order_dir)
341 order_by=order_by, order_dir=order_dir)
342
342
343 _my = PullRequestModel().get_not_reviewed(user_id)
343 _my = PullRequestModel().get_not_reviewed(user_id)
344 my_participation = []
344 my_participation = []
345 for pr in pull_requests:
345 for pr in pull_requests:
346 if pr in _my:
346 if pr in _my:
347 my_participation.append(pr)
347 my_participation.append(pr)
348 _filtered_pull_requests = my_participation
348 _filtered_pull_requests = my_participation
349 if length:
349 if length:
350 return _filtered_pull_requests[offset:offset+length]
350 return _filtered_pull_requests[offset:offset+length]
351 else:
351 else:
352 return _filtered_pull_requests
352 return _filtered_pull_requests
353
353
354 def get_not_reviewed(self, user_id):
354 def get_not_reviewed(self, user_id):
355 return [
355 return [
356 x.pull_request for x in PullRequestReviewers.query().filter(
356 x.pull_request for x in PullRequestReviewers.query().filter(
357 PullRequestReviewers.user_id == user_id).all()
357 PullRequestReviewers.user_id == user_id).all()
358 ]
358 ]
359
359
360 def _prepare_participating_query(self, user_id=None, statuses=None,
360 def _prepare_participating_query(self, user_id=None, statuses=None,
361 order_by=None, order_dir='desc'):
361 order_by=None, order_dir='desc'):
362 q = PullRequest.query()
362 q = PullRequest.query()
363 if user_id:
363 if user_id:
364 reviewers_subquery = Session().query(
364 reviewers_subquery = Session().query(
365 PullRequestReviewers.pull_request_id).filter(
365 PullRequestReviewers.pull_request_id).filter(
366 PullRequestReviewers.user_id == user_id).subquery()
366 PullRequestReviewers.user_id == user_id).subquery()
367 user_filter = or_(
367 user_filter = or_(
368 PullRequest.user_id == user_id,
368 PullRequest.user_id == user_id,
369 PullRequest.pull_request_id.in_(reviewers_subquery)
369 PullRequest.pull_request_id.in_(reviewers_subquery)
370 )
370 )
371 q = PullRequest.query().filter(user_filter)
371 q = PullRequest.query().filter(user_filter)
372
372
373 # closed,opened
373 # closed,opened
374 if statuses:
374 if statuses:
375 q = q.filter(PullRequest.status.in_(statuses))
375 q = q.filter(PullRequest.status.in_(statuses))
376
376
377 if order_by:
377 if order_by:
378 order_map = {
378 order_map = {
379 'name_raw': PullRequest.pull_request_id,
379 'name_raw': PullRequest.pull_request_id,
380 'title': PullRequest.title,
380 'title': PullRequest.title,
381 'updated_on_raw': PullRequest.updated_on,
381 'updated_on_raw': PullRequest.updated_on,
382 'target_repo': PullRequest.target_repo_id
382 'target_repo': PullRequest.target_repo_id
383 }
383 }
384 if order_dir == 'asc':
384 if order_dir == 'asc':
385 q = q.order_by(order_map[order_by].asc())
385 q = q.order_by(order_map[order_by].asc())
386 else:
386 else:
387 q = q.order_by(order_map[order_by].desc())
387 q = q.order_by(order_map[order_by].desc())
388
388
389 return q
389 return q
390
390
391 def count_im_participating_in(self, user_id=None, statuses=None):
391 def count_im_participating_in(self, user_id=None, statuses=None):
392 q = self._prepare_participating_query(user_id, statuses=statuses)
392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 return q.count()
393 return q.count()
394
394
395 def get_im_participating_in(
395 def get_im_participating_in(
396 self, user_id=None, statuses=None, offset=0,
396 self, user_id=None, statuses=None, offset=0,
397 length=None, order_by=None, order_dir='desc'):
397 length=None, order_by=None, order_dir='desc'):
398 """
398 """
399 Get all pull requests that I'm participating in, or that I have opened
399 Get all pull requests that I'm participating in, or that I have opened
400 """
400 """
401
401
402 q = self._prepare_participating_query(
402 q = self._prepare_participating_query(
403 user_id, statuses=statuses, order_by=order_by,
403 user_id, statuses=statuses, order_by=order_by,
404 order_dir=order_dir)
404 order_dir=order_dir)
405
405
406 if length:
406 if length:
407 pull_requests = q.limit(length).offset(offset).all()
407 pull_requests = q.limit(length).offset(offset).all()
408 else:
408 else:
409 pull_requests = q.all()
409 pull_requests = q.all()
410
410
411 return pull_requests
411 return pull_requests
412
412
413 def get_versions(self, pull_request):
413 def get_versions(self, pull_request):
414 """
414 """
415 returns versions of the pull request sorted by ID ascending
415 returns versions of the pull request sorted by ID ascending
416 """
416 """
417 return PullRequestVersion.query()\
417 return PullRequestVersion.query()\
418 .filter(PullRequestVersion.pull_request == pull_request)\
418 .filter(PullRequestVersion.pull_request == pull_request)\
419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
420 .all()
420 .all()
421
421
422 def get_pr_version(self, pull_request_id, version=None):
422 def get_pr_version(self, pull_request_id, version=None):
423 at_version = None
423 at_version = None
424
424
425 if version and version == 'latest':
425 if version and version == 'latest':
426 pull_request_ver = PullRequest.get(pull_request_id)
426 pull_request_ver = PullRequest.get(pull_request_id)
427 pull_request_obj = pull_request_ver
427 pull_request_obj = pull_request_ver
428 _org_pull_request_obj = pull_request_obj
428 _org_pull_request_obj = pull_request_obj
429 at_version = 'latest'
429 at_version = 'latest'
430 elif version:
430 elif version:
431 pull_request_ver = PullRequestVersion.get_or_404(version)
431 pull_request_ver = PullRequestVersion.get_or_404(version)
432 pull_request_obj = pull_request_ver
432 pull_request_obj = pull_request_ver
433 _org_pull_request_obj = pull_request_ver.pull_request
433 _org_pull_request_obj = pull_request_ver.pull_request
434 at_version = pull_request_ver.pull_request_version_id
434 at_version = pull_request_ver.pull_request_version_id
435 else:
435 else:
436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
437 pull_request_id)
437 pull_request_id)
438
438
439 pull_request_display_obj = PullRequest.get_pr_display_object(
439 pull_request_display_obj = PullRequest.get_pr_display_object(
440 pull_request_obj, _org_pull_request_obj)
440 pull_request_obj, _org_pull_request_obj)
441
441
442 return _org_pull_request_obj, pull_request_obj, \
442 return _org_pull_request_obj, pull_request_obj, \
443 pull_request_display_obj, at_version
443 pull_request_display_obj, at_version
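get_pr_version() returns a four-element tuple, so callers typically unpack it in one go. A short hedged sketch, assuming a pull request with id 1 exists and a RhodeCode session is configured:

    from rhodecode.model.pull_request import PullRequestModel

    (org_pull_request, pull_request_at_version,
     pull_request_display, at_version) = PullRequestModel().get_pr_version(
        1, version='latest')
    # at_version is 'latest' here, a version id when a concrete version was
    # requested, or None for the plain lookup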
444
444
445 def create(self, created_by, source_repo, source_ref, target_repo,
445 def create(self, created_by, source_repo, source_ref, target_repo,
446 target_ref, revisions, reviewers, title, description=None,
446 target_ref, revisions, reviewers, title, description=None,
447 description_renderer=None,
447 reviewer_data=None, translator=None, auth_user=None):
448 reviewer_data=None, translator=None, auth_user=None):
448 translator = translator or get_current_request().translate
449 translator = translator or get_current_request().translate
449
450
450 created_by_user = self._get_user(created_by)
451 created_by_user = self._get_user(created_by)
451 auth_user = auth_user or created_by_user
452 auth_user = auth_user or created_by_user
452 source_repo = self._get_repo(source_repo)
453 source_repo = self._get_repo(source_repo)
453 target_repo = self._get_repo(target_repo)
454 target_repo = self._get_repo(target_repo)
454
455
455 pull_request = PullRequest()
456 pull_request = PullRequest()
456 pull_request.source_repo = source_repo
457 pull_request.source_repo = source_repo
457 pull_request.source_ref = source_ref
458 pull_request.source_ref = source_ref
458 pull_request.target_repo = target_repo
459 pull_request.target_repo = target_repo
459 pull_request.target_ref = target_ref
460 pull_request.target_ref = target_ref
460 pull_request.revisions = revisions
461 pull_request.revisions = revisions
461 pull_request.title = title
462 pull_request.title = title
462 pull_request.description = description
463 pull_request.description = description
464 pull_request.description_renderer = description_renderer
463 pull_request.author = created_by_user
465 pull_request.author = created_by_user
464 pull_request.reviewer_data = reviewer_data
466 pull_request.reviewer_data = reviewer_data
465
467
466 Session().add(pull_request)
468 Session().add(pull_request)
467 Session().flush()
469 Session().flush()
468
470
469 reviewer_ids = set()
471 reviewer_ids = set()
470 # members / reviewers
472 # members / reviewers
471 for reviewer_object in reviewers:
473 for reviewer_object in reviewers:
472 user_id, reasons, mandatory, rules = reviewer_object
474 user_id, reasons, mandatory, rules = reviewer_object
473 user = self._get_user(user_id)
475 user = self._get_user(user_id)
474
476
475 # skip duplicates
477 # skip duplicates
476 if user.user_id in reviewer_ids:
478 if user.user_id in reviewer_ids:
477 continue
479 continue
478
480
479 reviewer_ids.add(user.user_id)
481 reviewer_ids.add(user.user_id)
480
482
481 reviewer = PullRequestReviewers()
483 reviewer = PullRequestReviewers()
482 reviewer.user = user
484 reviewer.user = user
483 reviewer.pull_request = pull_request
485 reviewer.pull_request = pull_request
484 reviewer.reasons = reasons
486 reviewer.reasons = reasons
485 reviewer.mandatory = mandatory
487 reviewer.mandatory = mandatory
486
488
487 # NOTE(marcink): pick only first rule for now
489 # NOTE(marcink): pick only first rule for now
488 rule_id = list(rules)[0] if rules else None
490 rule_id = list(rules)[0] if rules else None
489 rule = RepoReviewRule.get(rule_id) if rule_id else None
491 rule = RepoReviewRule.get(rule_id) if rule_id else None
490 if rule:
492 if rule:
491 review_group = rule.user_group_vote_rule()
493 review_group = rule.user_group_vote_rule()
492 if review_group:
494 if review_group:
493 # NOTE(marcink):
495 # NOTE(marcink):
494 # again, the user can be a member of more groups,
496 # again, the user can be a member of more groups,
495 # but we pick the first one, the same as the default reviewers algo
497 # but we pick the first one, the same as the default reviewers algo
496 review_group = review_group[0]
498 review_group = review_group[0]
497
499
498 rule_data = {
500 rule_data = {
499 'rule_name':
501 'rule_name':
500 rule.review_rule_name,
502 rule.review_rule_name,
501 'rule_user_group_entry_id':
503 'rule_user_group_entry_id':
502 review_group.repo_review_rule_users_group_id,
504 review_group.repo_review_rule_users_group_id,
503 'rule_user_group_name':
505 'rule_user_group_name':
504 review_group.users_group.users_group_name,
506 review_group.users_group.users_group_name,
505 'rule_user_group_members':
507 'rule_user_group_members':
506 [x.user.username for x in review_group.users_group.members],
508 [x.user.username for x in review_group.users_group.members],
507 }
509 }
508 # e.g {'vote_rule': -1, 'mandatory': True}
510 # e.g {'vote_rule': -1, 'mandatory': True}
509 rule_data.update(review_group.rule_data())
511 rule_data.update(review_group.rule_data())
510
512
511 reviewer.rule_data = rule_data
513 reviewer.rule_data = rule_data
512
514
513 Session().add(reviewer)
515 Session().add(reviewer)
514 Session().flush()
516 Session().flush()
515
517
516 # Set approval status to "Under Review" for all commits which are
518 # Set approval status to "Under Review" for all commits which are
517 # part of this pull request.
519 # part of this pull request.
518 ChangesetStatusModel().set_status(
520 ChangesetStatusModel().set_status(
519 repo=target_repo,
521 repo=target_repo,
520 status=ChangesetStatus.STATUS_UNDER_REVIEW,
522 status=ChangesetStatus.STATUS_UNDER_REVIEW,
521 user=created_by_user,
523 user=created_by_user,
522 pull_request=pull_request
524 pull_request=pull_request
523 )
525 )
524 # we commit early at this point. This has to do with the fact
526 # we commit early at this point. This has to do with the fact
525 # that the queries above do some row-locking, and because of that
527 # that the queries above do some row-locking, and because of that
526 # we need to commit and finish the transaction before the validate call
528 # we need to commit and finish the transaction before the validate call
527 # below, which for large repos could take long, resulting in long row locks
529 # below, which for large repos could take long, resulting in long row locks
528 Session().commit()
530 Session().commit()
529
531
530 # prepare workspace, and run initial merge simulation
532 # prepare workspace, and run initial merge simulation
531 MergeCheck.validate(
533 MergeCheck.validate(
532 pull_request, user=created_by_user, translator=translator)
534 pull_request, user=created_by_user, translator=translator)
533
535
534 self.notify_reviewers(pull_request, reviewer_ids)
536 self.notify_reviewers(pull_request, reviewer_ids)
535 self._trigger_pull_request_hook(
537 self._trigger_pull_request_hook(
536 pull_request, created_by_user, 'create')
538 pull_request, created_by_user, 'create')
537
539
538 creation_data = pull_request.get_api_data(with_merge_state=False)
540 creation_data = pull_request.get_api_data(with_merge_state=False)
539 self._log_audit_action(
541 self._log_audit_action(
540 'repo.pull_request.create', {'data': creation_data},
542 'repo.pull_request.create', {'data': creation_data},
541 auth_user, pull_request)
543 auth_user, pull_request)
542
544
543 return pull_request
545 return pull_request
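With this change create() also persists which renderer was chosen for the description. A hedged sketch of a call site follows; every identifier and value is illustrative, the reviewers list mirrors the (user_id, reasons, mandatory, rules) tuples unpacked above, and a working RhodeCode request/session context is assumed.

    reviewers = [(2, ['added manually'], False, [])]

    pull_request = PullRequestModel().create(
        created_by=1,
        source_repo='some/repo-fork',
        source_ref='branch:feature:abc123',
        target_repo='some/repo',
        target_ref='branch:default:def456',
        revisions=['abc123'],
        reviewers=reviewers,
        title='Store the description renderer',
        description='Rendered with the renderer picked in the UI',
        description_renderer='markdown',
    )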
544
546
545 def _trigger_pull_request_hook(self, pull_request, user, action):
547 def _trigger_pull_request_hook(self, pull_request, user, action):
546 pull_request = self.__get_pull_request(pull_request)
548 pull_request = self.__get_pull_request(pull_request)
547 target_scm = pull_request.target_repo.scm_instance()
549 target_scm = pull_request.target_repo.scm_instance()
548 if action == 'create':
550 if action == 'create':
549 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
551 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
550 elif action == 'merge':
552 elif action == 'merge':
551 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
553 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
552 elif action == 'close':
554 elif action == 'close':
553 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
555 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
554 elif action == 'review_status_change':
556 elif action == 'review_status_change':
555 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
557 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
556 elif action == 'update':
558 elif action == 'update':
557 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
559 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
558 else:
560 else:
559 return
561 return
560
562
561 trigger_hook(
563 trigger_hook(
562 username=user.username,
564 username=user.username,
563 repo_name=pull_request.target_repo.repo_name,
565 repo_name=pull_request.target_repo.repo_name,
564 repo_alias=target_scm.alias,
566 repo_alias=target_scm.alias,
565 pull_request=pull_request)
567 pull_request=pull_request)
566
568
567 def _get_commit_ids(self, pull_request):
569 def _get_commit_ids(self, pull_request):
568 """
570 """
569 Return the commit ids of the merged pull request.
571 Return the commit ids of the merged pull request.
570
572
571 This method does not yet deal correctly with the lack of auto-updates
573 This method does not yet deal correctly with the lack of auto-updates
572 or with implicit target updates.
574 or with implicit target updates.
573 For example: if a commit in the source repo is already in the target, it
575 For example: if a commit in the source repo is already in the target, it
574 will still be reported.
576 will still be reported.
575 """
577 """
576 merge_rev = pull_request.merge_rev
578 merge_rev = pull_request.merge_rev
577 if merge_rev is None:
579 if merge_rev is None:
578 raise ValueError('This pull request was not merged yet')
580 raise ValueError('This pull request was not merged yet')
579
581
580 commit_ids = list(pull_request.revisions)
582 commit_ids = list(pull_request.revisions)
581 if merge_rev not in commit_ids:
583 if merge_rev not in commit_ids:
582 commit_ids.append(merge_rev)
584 commit_ids.append(merge_rev)
583
585
584 return commit_ids
586 return commit_ids
585
587
586 def merge_repo(self, pull_request, user, extras):
588 def merge_repo(self, pull_request, user, extras):
587 log.debug("Merging pull request %s", pull_request.pull_request_id)
589 log.debug("Merging pull request %s", pull_request.pull_request_id)
588 merge_state = self._merge_pull_request(pull_request, user, extras)
590 merge_state = self._merge_pull_request(pull_request, user, extras)
589 if merge_state.executed:
591 if merge_state.executed:
590 log.debug(
592 log.debug(
591 "Merge was successful, updating the pull request comments.")
593 "Merge was successful, updating the pull request comments.")
592 self._comment_and_close_pr(pull_request, user, merge_state)
594 self._comment_and_close_pr(pull_request, user, merge_state)
593
595
594 self._log_audit_action(
596 self._log_audit_action(
595 'repo.pull_request.merge',
597 'repo.pull_request.merge',
596 {'merge_state': merge_state.__dict__},
598 {'merge_state': merge_state.__dict__},
597 user, pull_request)
599 user, pull_request)
598
600
599 else:
601 else:
600 log.warn("Merge failed, not updating the pull request.")
602 log.warn("Merge failed, not updating the pull request.")
601 return merge_state
603 return merge_state
602
604
603 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
605 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
604 target_vcs = pull_request.target_repo.scm_instance()
606 target_vcs = pull_request.target_repo.scm_instance()
605 source_vcs = pull_request.source_repo.scm_instance()
607 source_vcs = pull_request.source_repo.scm_instance()
606 target_ref = self._refresh_reference(
608 target_ref = self._refresh_reference(
607 pull_request.target_ref_parts, target_vcs)
609 pull_request.target_ref_parts, target_vcs)
608
610
609 message = merge_msg or (
611 message = merge_msg or (
610 'Merge pull request #%(pr_id)s from '
612 'Merge pull request #%(pr_id)s from '
611 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
613 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
612 'pr_id': pull_request.pull_request_id,
614 'pr_id': pull_request.pull_request_id,
613 'source_repo': source_vcs.name,
615 'source_repo': source_vcs.name,
614 'source_ref_name': pull_request.source_ref_parts.name,
616 'source_ref_name': pull_request.source_ref_parts.name,
615 'pr_title': pull_request.title
617 'pr_title': pull_request.title
616 }
618 }
617
619
618 workspace_id = self._workspace_id(pull_request)
620 workspace_id = self._workspace_id(pull_request)
619 repo_id = pull_request.target_repo.repo_id
621 repo_id = pull_request.target_repo.repo_id
620 use_rebase = self._use_rebase_for_merging(pull_request)
622 use_rebase = self._use_rebase_for_merging(pull_request)
621 close_branch = self._close_branch_before_merging(pull_request)
623 close_branch = self._close_branch_before_merging(pull_request)
622
624
623 callback_daemon, extras = prepare_callback_daemon(
625 callback_daemon, extras = prepare_callback_daemon(
624 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
626 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
625 host=vcs_settings.HOOKS_HOST,
627 host=vcs_settings.HOOKS_HOST,
626 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
628 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
627
629
628 with callback_daemon:
630 with callback_daemon:
629 # TODO: johbo: Implement a clean way to run a config_override
631 # TODO: johbo: Implement a clean way to run a config_override
630 # for a single call.
632 # for a single call.
631 target_vcs.config.set(
633 target_vcs.config.set(
632 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
634 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
633 merge_state = target_vcs.merge(
635 merge_state = target_vcs.merge(
634 repo_id, workspace_id, target_ref, source_vcs,
636 repo_id, workspace_id, target_ref, source_vcs,
635 pull_request.source_ref_parts,
637 pull_request.source_ref_parts,
636 user_name=user.username, user_email=user.email,
638 user_name=user.username, user_email=user.email,
637 message=message, use_rebase=use_rebase,
639 message=message, use_rebase=use_rebase,
638 close_branch=close_branch)
640 close_branch=close_branch)
639 return merge_state
641 return merge_state
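When no merge_msg is supplied, the default commit message is built from the template shown in _merge_pull_request above. A small sketch of the resulting text for hypothetical values:

    message = (
        'Merge pull request #%(pr_id)s from '
        '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
        'pr_id': 42,
        'source_repo': 'some/repo-fork',
        'source_ref_name': 'feature-branch',
        'pr_title': 'Store the description renderer',
    }
    # message == 'Merge pull request #42 from some/repo-fork feature-branch'
    #            '\n\n Store the description renderer'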
640
642
641 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
643 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
642 pull_request.merge_rev = merge_state.merge_ref.commit_id
644 pull_request.merge_rev = merge_state.merge_ref.commit_id
643 pull_request.updated_on = datetime.datetime.now()
645 pull_request.updated_on = datetime.datetime.now()
644 close_msg = close_msg or 'Pull request merged and closed'
646 close_msg = close_msg or 'Pull request merged and closed'
645
647
646 CommentsModel().create(
648 CommentsModel().create(
647 text=safe_unicode(close_msg),
649 text=safe_unicode(close_msg),
648 repo=pull_request.target_repo.repo_id,
650 repo=pull_request.target_repo.repo_id,
649 user=user.user_id,
651 user=user.user_id,
650 pull_request=pull_request.pull_request_id,
652 pull_request=pull_request.pull_request_id,
651 f_path=None,
653 f_path=None,
652 line_no=None,
654 line_no=None,
653 closing_pr=True
655 closing_pr=True
654 )
656 )
655
657
656 Session().add(pull_request)
658 Session().add(pull_request)
657 Session().flush()
659 Session().flush()
658 # TODO: paris: replace invalidation with less radical solution
660 # TODO: paris: replace invalidation with less radical solution
659 ScmModel().mark_for_invalidation(
661 ScmModel().mark_for_invalidation(
660 pull_request.target_repo.repo_name)
662 pull_request.target_repo.repo_name)
661 self._trigger_pull_request_hook(pull_request, user, 'merge')
663 self._trigger_pull_request_hook(pull_request, user, 'merge')
662
664
663 def has_valid_update_type(self, pull_request):
665 def has_valid_update_type(self, pull_request):
664 source_ref_type = pull_request.source_ref_parts.type
666 source_ref_type = pull_request.source_ref_parts.type
665 return source_ref_type in ['book', 'branch', 'tag']
667 return source_ref_type in ['book', 'branch', 'tag']
666
668
667 def update_commits(self, pull_request):
669 def update_commits(self, pull_request):
668 """
670 """
669 Get the updated list of commits for the pull request
671 Get the updated list of commits for the pull request
670 and return the new pull request version and the list
672 and return the new pull request version and the list
671 of commits processed by this update action
673 of commits processed by this update action
672 """
674 """
673 pull_request = self.__get_pull_request(pull_request)
675 pull_request = self.__get_pull_request(pull_request)
674 source_ref_type = pull_request.source_ref_parts.type
676 source_ref_type = pull_request.source_ref_parts.type
675 source_ref_name = pull_request.source_ref_parts.name
677 source_ref_name = pull_request.source_ref_parts.name
676 source_ref_id = pull_request.source_ref_parts.commit_id
678 source_ref_id = pull_request.source_ref_parts.commit_id
677
679
678 target_ref_type = pull_request.target_ref_parts.type
680 target_ref_type = pull_request.target_ref_parts.type
679 target_ref_name = pull_request.target_ref_parts.name
681 target_ref_name = pull_request.target_ref_parts.name
680 target_ref_id = pull_request.target_ref_parts.commit_id
682 target_ref_id = pull_request.target_ref_parts.commit_id
681
683
682 if not self.has_valid_update_type(pull_request):
684 if not self.has_valid_update_type(pull_request):
683 log.debug(
685 log.debug(
684 "Skipping update of pull request %s due to ref type: %s",
686 "Skipping update of pull request %s due to ref type: %s",
685 pull_request, source_ref_type)
687 pull_request, source_ref_type)
686 return UpdateResponse(
688 return UpdateResponse(
687 executed=False,
689 executed=False,
688 reason=UpdateFailureReason.WRONG_REF_TYPE,
690 reason=UpdateFailureReason.WRONG_REF_TYPE,
689 old=pull_request, new=None, changes=None,
691 old=pull_request, new=None, changes=None,
690 source_changed=False, target_changed=False)
692 source_changed=False, target_changed=False)
691
693
692 # source repo
694 # source repo
693 source_repo = pull_request.source_repo.scm_instance()
695 source_repo = pull_request.source_repo.scm_instance()
694 try:
696 try:
695 source_commit = source_repo.get_commit(commit_id=source_ref_name)
697 source_commit = source_repo.get_commit(commit_id=source_ref_name)
696 except CommitDoesNotExistError:
698 except CommitDoesNotExistError:
697 return UpdateResponse(
699 return UpdateResponse(
698 executed=False,
700 executed=False,
699 reason=UpdateFailureReason.MISSING_SOURCE_REF,
701 reason=UpdateFailureReason.MISSING_SOURCE_REF,
700 old=pull_request, new=None, changes=None,
702 old=pull_request, new=None, changes=None,
701 source_changed=False, target_changed=False)
703 source_changed=False, target_changed=False)
702
704
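# the ref has moved (i.e. the source gained new commits) if the stored commit id differs from the current tip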
703 source_changed = source_ref_id != source_commit.raw_id
705 source_changed = source_ref_id != source_commit.raw_id
704
706
705 # target repo
707 # target repo
706 target_repo = pull_request.target_repo.scm_instance()
708 target_repo = pull_request.target_repo.scm_instance()
707 try:
709 try:
708 target_commit = target_repo.get_commit(commit_id=target_ref_name)
710 target_commit = target_repo.get_commit(commit_id=target_ref_name)
709 except CommitDoesNotExistError:
711 except CommitDoesNotExistError:
710 return UpdateResponse(
712 return UpdateResponse(
711 executed=False,
713 executed=False,
712 reason=UpdateFailureReason.MISSING_TARGET_REF,
714 reason=UpdateFailureReason.MISSING_TARGET_REF,
713 old=pull_request, new=None, changes=None,
715 old=pull_request, new=None, changes=None,
714 source_changed=False, target_changed=False)
716 source_changed=False, target_changed=False)
715 target_changed = target_ref_id != target_commit.raw_id
717 target_changed = target_ref_id != target_commit.raw_id
716
718
717 if not (source_changed or target_changed):
719 if not (source_changed or target_changed):
718 log.debug("Nothing changed in pull request %s", pull_request)
720 log.debug("Nothing changed in pull request %s", pull_request)
719 return UpdateResponse(
721 return UpdateResponse(
720 executed=False,
722 executed=False,
721 reason=UpdateFailureReason.NO_CHANGE,
723 reason=UpdateFailureReason.NO_CHANGE,
722 old=pull_request, new=None, changes=None,
724 old=pull_request, new=None, changes=None,
723 source_changed=source_changed, target_changed=target_changed)
725 source_changed=source_changed, target_changed=target_changed)
724
726
725 change_in_found = 'target repo' if target_changed else 'source repo'
727 change_in_found = 'target repo' if target_changed else 'source repo'
726 log.debug('Updating pull request because of change in %s detected',
728 log.debug('Updating pull request because of change in %s detected',
727 change_in_found)
729 change_in_found)
728
730
729 # Finally, an update is needed; if the source changed we create
731 # Finally, an update is needed; if the source changed we create
730 # a new version, otherwise we just update the existing one
732 # a new version, otherwise we just update the existing one
731 if source_changed:
733 if source_changed:
732 pull_request_version = self._create_version_from_snapshot(pull_request)
734 pull_request_version = self._create_version_from_snapshot(pull_request)
733 self._link_comments_to_version(pull_request_version)
735 self._link_comments_to_version(pull_request_version)
734 else:
736 else:
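# target-only change: point at the most recent existing version (if any) instead of creating a new snapshot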
735 try:
737 try:
736 ver = pull_request.versions[-1]
738 ver = pull_request.versions[-1]
737 except IndexError:
739 except IndexError:
738 ver = None
740 ver = None
739
741
740 pull_request.pull_request_version_id = \
742 pull_request.pull_request_version_id = \
741 ver.pull_request_version_id if ver else None
743 ver.pull_request_version_id if ver else None
742 pull_request_version = pull_request
744 pull_request_version = pull_request
743
745
744 try:
746 try:
745 if target_ref_type in ('tag', 'branch', 'book'):
747 if target_ref_type in ('tag', 'branch', 'book'):
746 target_commit = target_repo.get_commit(target_ref_name)
748 target_commit = target_repo.get_commit(target_ref_name)
747 else:
749 else:
748 target_commit = target_repo.get_commit(target_ref_id)
750 target_commit = target_repo.get_commit(target_ref_id)
749 except CommitDoesNotExistError:
751 except CommitDoesNotExistError:
750 return UpdateResponse(
752 return UpdateResponse(
751 executed=False,
753 executed=False,
752 reason=UpdateFailureReason.MISSING_TARGET_REF,
754 reason=UpdateFailureReason.MISSING_TARGET_REF,
753 old=pull_request, new=None, changes=None,
755 old=pull_request, new=None, changes=None,
754 source_changed=source_changed, target_changed=target_changed)
756 source_changed=source_changed, target_changed=target_changed)
755
757
756 # re-compute commit ids
758 # re-compute commit ids
757 old_commit_ids = pull_request.revisions
759 old_commit_ids = pull_request.revisions
758 pre_load = ["author", "branch", "date", "message"]
760 pre_load = ["author", "branch", "date", "message"]
759 commit_ranges = target_repo.compare(
761 commit_ranges = target_repo.compare(
760 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
762 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
761 pre_load=pre_load)
763 pre_load=pre_load)
762
764
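# the common ancestor (merge base) is what gets written into the updated target ref below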
763 ancestor = target_repo.get_common_ancestor(
765 ancestor = target_repo.get_common_ancestor(
764 target_commit.raw_id, source_commit.raw_id, source_repo)
766 target_commit.raw_id, source_commit.raw_id, source_repo)
765
767
766 pull_request.source_ref = '%s:%s:%s' % (
768 pull_request.source_ref = '%s:%s:%s' % (
767 source_ref_type, source_ref_name, source_commit.raw_id)
769 source_ref_type, source_ref_name, source_commit.raw_id)
768 pull_request.target_ref = '%s:%s:%s' % (
770 pull_request.target_ref = '%s:%s:%s' % (
769 target_ref_type, target_ref_name, ancestor)
771 target_ref_type, target_ref_name, ancestor)
770
772
771 pull_request.revisions = [
773 pull_request.revisions = [
772 commit.raw_id for commit in reversed(commit_ranges)]
774 commit.raw_id for commit in reversed(commit_ranges)]
773 pull_request.updated_on = datetime.datetime.now()
775 pull_request.updated_on = datetime.datetime.now()
774 Session().add(pull_request)
776 Session().add(pull_request)
775 new_commit_ids = pull_request.revisions
777 new_commit_ids = pull_request.revisions
776
778
777 old_diff_data, new_diff_data = self._generate_update_diffs(
779 old_diff_data, new_diff_data = self._generate_update_diffs(
778 pull_request, pull_request_version)
780 pull_request, pull_request_version)
779
781
780 # calculate commit and file changes
782 # calculate commit and file changes
781 changes = self._calculate_commit_id_changes(
783 changes = self._calculate_commit_id_changes(
782 old_commit_ids, new_commit_ids)
784 old_commit_ids, new_commit_ids)
783 file_changes = self._calculate_file_changes(
785 file_changes = self._calculate_file_changes(
784 old_diff_data, new_diff_data)
786 old_diff_data, new_diff_data)
785
787
786 # set comments as outdated if DIFFS changed
788 # set comments as outdated if DIFFS changed
787 CommentsModel().outdate_comments(
789 CommentsModel().outdate_comments(
788 pull_request, old_diff_data=old_diff_data,
790 pull_request, old_diff_data=old_diff_data,
789 new_diff_data=new_diff_data)
791 new_diff_data=new_diff_data)
790
792
791 commit_changes = (changes.added or changes.removed)
793 commit_changes = (changes.added or changes.removed)
792 file_node_changes = (
794 file_node_changes = (
793 file_changes.added or file_changes.modified or file_changes.removed)
795 file_changes.added or file_changes.modified or file_changes.removed)
794 pr_has_changes = commit_changes or file_node_changes
796 pr_has_changes = commit_changes or file_node_changes
795
797
796 # Add an automatic comment to the pull request if
798 # Add an automatic comment to the pull request if
797 # anything has changed
799 # anything has changed
798 if pr_has_changes:
800 if pr_has_changes:
799 update_comment = CommentsModel().create(
801 update_comment = CommentsModel().create(
800 text=self._render_update_message(changes, file_changes),
802 text=self._render_update_message(changes, file_changes),
801 repo=pull_request.target_repo,
803 repo=pull_request.target_repo,
802 user=pull_request.author,
804 user=pull_request.author,
803 pull_request=pull_request,
805 pull_request=pull_request,
804 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
806 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
805
807
806 # Update status to "Under Review" for added commits
808 # Update status to "Under Review" for added commits
807 for commit_id in changes.added:
809 for commit_id in changes.added:
808 ChangesetStatusModel().set_status(
810 ChangesetStatusModel().set_status(
809 repo=pull_request.source_repo,
811 repo=pull_request.source_repo,
810 status=ChangesetStatus.STATUS_UNDER_REVIEW,
812 status=ChangesetStatus.STATUS_UNDER_REVIEW,
811 comment=update_comment,
813 comment=update_comment,
812 user=pull_request.author,
814 user=pull_request.author,
813 pull_request=pull_request,
815 pull_request=pull_request,
814 revision=commit_id)
816 revision=commit_id)
815
817
816 log.debug(
818 log.debug(
817 'Updated pull request %s, added_ids: %s, common_ids: %s, '
819 'Updated pull request %s, added_ids: %s, common_ids: %s, '
818 'removed_ids: %s', pull_request.pull_request_id,
820 'removed_ids: %s', pull_request.pull_request_id,
819 changes.added, changes.common, changes.removed)
821 changes.added, changes.common, changes.removed)
820 log.debug(
822 log.debug(
821 'Updated pull request with the following file changes: %s',
823 'Updated pull request with the following file changes: %s',
822 file_changes)
824 file_changes)
823
825
824 log.info(
826 log.info(
825 "Updated pull request %s from commit %s to commit %s, "
827 "Updated pull request %s from commit %s to commit %s, "
826 "stored new version %s of this pull request.",
828 "stored new version %s of this pull request.",
827 pull_request.pull_request_id, source_ref_id,
829 pull_request.pull_request_id, source_ref_id,
828 pull_request.source_ref_parts.commit_id,
830 pull_request.source_ref_parts.commit_id,
829 pull_request_version.pull_request_version_id)
831 pull_request_version.pull_request_version_id)
830 Session().commit()
832 Session().commit()
831 self._trigger_pull_request_hook(
833 self._trigger_pull_request_hook(
832 pull_request, pull_request.author, 'update')
834 pull_request, pull_request.author, 'update')
833
835
834 return UpdateResponse(
836 return UpdateResponse(
835 executed=True, reason=UpdateFailureReason.NONE,
837 executed=True, reason=UpdateFailureReason.NONE,
836 old=pull_request, new=pull_request_version, changes=changes,
838 old=pull_request, new=pull_request_version, changes=changes,
837 source_changed=source_changed, target_changed=target_changed)
839 source_changed=source_changed, target_changed=target_changed)
838
840
839 def _create_version_from_snapshot(self, pull_request):
841 def _create_version_from_snapshot(self, pull_request):
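# snapshot the current pull request state field by field into a new PullRequestVersion row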
840 version = PullRequestVersion()
842 version = PullRequestVersion()
841 version.title = pull_request.title
843 version.title = pull_request.title
842 version.description = pull_request.description
844 version.description = pull_request.description
843 version.status = pull_request.status
845 version.status = pull_request.status
844 version.created_on = datetime.datetime.now()
846 version.created_on = datetime.datetime.now()
845 version.updated_on = pull_request.updated_on
847 version.updated_on = pull_request.updated_on
846 version.user_id = pull_request.user_id
848 version.user_id = pull_request.user_id
847 version.source_repo = pull_request.source_repo
849 version.source_repo = pull_request.source_repo
848 version.source_ref = pull_request.source_ref
850 version.source_ref = pull_request.source_ref
849 version.target_repo = pull_request.target_repo
851 version.target_repo = pull_request.target_repo
850 version.target_ref = pull_request.target_ref
852 version.target_ref = pull_request.target_ref
851
853
852 version._last_merge_source_rev = pull_request._last_merge_source_rev
854 version._last_merge_source_rev = pull_request._last_merge_source_rev
853 version._last_merge_target_rev = pull_request._last_merge_target_rev
855 version._last_merge_target_rev = pull_request._last_merge_target_rev
854 version.last_merge_status = pull_request.last_merge_status
856 version.last_merge_status = pull_request.last_merge_status
855 version.shadow_merge_ref = pull_request.shadow_merge_ref
857 version.shadow_merge_ref = pull_request.shadow_merge_ref
856 version.merge_rev = pull_request.merge_rev
858 version.merge_rev = pull_request.merge_rev
857 version.reviewer_data = pull_request.reviewer_data
859 version.reviewer_data = pull_request.reviewer_data
858
860
859 version.revisions = pull_request.revisions
861 version.revisions = pull_request.revisions
860 version.pull_request = pull_request
862 version.pull_request = pull_request
861 Session().add(version)
863 Session().add(version)
862 Session().flush()
864 Session().flush()
863
865
864 return version
866 return version
865
867
866 def _generate_update_diffs(self, pull_request, pull_request_version):
868 def _generate_update_diffs(self, pull_request, pull_request_version):
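# build the diff of the previous version and the diff of the current state so they can be compared for outdated comments and file changes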
867
869
868 diff_context = (
870 diff_context = (
869 self.DIFF_CONTEXT +
871 self.DIFF_CONTEXT +
870 CommentsModel.needed_extra_diff_context())
872 CommentsModel.needed_extra_diff_context())
871
873
872 source_repo = pull_request_version.source_repo
874 source_repo = pull_request_version.source_repo
873 source_ref_id = pull_request_version.source_ref_parts.commit_id
875 source_ref_id = pull_request_version.source_ref_parts.commit_id
874 target_ref_id = pull_request_version.target_ref_parts.commit_id
876 target_ref_id = pull_request_version.target_ref_parts.commit_id
875 old_diff = self._get_diff_from_pr_or_version(
877 old_diff = self._get_diff_from_pr_or_version(
876 source_repo, source_ref_id, target_ref_id, context=diff_context)
878 source_repo, source_ref_id, target_ref_id, context=diff_context)
877
879
878 source_repo = pull_request.source_repo
880 source_repo = pull_request.source_repo
879 source_ref_id = pull_request.source_ref_parts.commit_id
881 source_ref_id = pull_request.source_ref_parts.commit_id
880 target_ref_id = pull_request.target_ref_parts.commit_id
882 target_ref_id = pull_request.target_ref_parts.commit_id
881
883
882 new_diff = self._get_diff_from_pr_or_version(
884 new_diff = self._get_diff_from_pr_or_version(
883 source_repo, source_ref_id, target_ref_id, context=diff_context)
885 source_repo, source_ref_id, target_ref_id, context=diff_context)
884
886
885 old_diff_data = diffs.DiffProcessor(old_diff)
887 old_diff_data = diffs.DiffProcessor(old_diff)
886 old_diff_data.prepare()
888 old_diff_data.prepare()
887 new_diff_data = diffs.DiffProcessor(new_diff)
889 new_diff_data = diffs.DiffProcessor(new_diff)
888 new_diff_data.prepare()
890 new_diff_data.prepare()
889
891
890 return old_diff_data, new_diff_data
892 return old_diff_data, new_diff_data
891
893
892 def _link_comments_to_version(self, pull_request_version):
894 def _link_comments_to_version(self, pull_request_version):
893 """
895 """
894 Link all unlinked comments of this pull request to the given version.
896 Link all unlinked comments of this pull request to the given version.
895
897
896 :param pull_request_version: The `PullRequestVersion` to which
898 :param pull_request_version: The `PullRequestVersion` to which
897 the comments shall be linked.
899 the comments shall be linked.
898
900
899 """
901 """
900 pull_request = pull_request_version.pull_request
902 pull_request = pull_request_version.pull_request
901 comments = ChangesetComment.query()\
903 comments = ChangesetComment.query()\
902 .filter(
904 .filter(
903 # TODO: johbo: Should we query for the repo at all here?
905 # TODO: johbo: Should we query for the repo at all here?
904 # Pending decision on how comments of PRs are to be related
906 # Pending decision on how comments of PRs are to be related
905 # to either the source repo, the target repo or no repo at all.
907 # to either the source repo, the target repo or no repo at all.
906 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
908 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
907 ChangesetComment.pull_request == pull_request,
909 ChangesetComment.pull_request == pull_request,
908 ChangesetComment.pull_request_version == None)\
910 ChangesetComment.pull_request_version == None)\
909 .order_by(ChangesetComment.comment_id.asc())
911 .order_by(ChangesetComment.comment_id.asc())
910
912
911 # TODO: johbo: Find out why this breaks if it is done in a bulk
913 # TODO: johbo: Find out why this breaks if it is done in a bulk
912 # operation.
914 # operation.
913 for comment in comments:
915 for comment in comments:
914 comment.pull_request_version_id = (
916 comment.pull_request_version_id = (
915 pull_request_version.pull_request_version_id)
917 pull_request_version.pull_request_version_id)
916 Session().add(comment)
918 Session().add(comment)
917
919
918 def _calculate_commit_id_changes(self, old_ids, new_ids):
920 def _calculate_commit_id_changes(self, old_ids, new_ids):
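# list comprehensions preserve commit ordering while computing the set-like added/common/removed differences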
919 added = [x for x in new_ids if x not in old_ids]
921 added = [x for x in new_ids if x not in old_ids]
920 common = [x for x in new_ids if x in old_ids]
922 common = [x for x in new_ids if x in old_ids]
921 removed = [x for x in old_ids if x not in new_ids]
923 removed = [x for x in old_ids if x not in new_ids]
922 total = new_ids
924 total = new_ids
923 return ChangeTuple(added, common, removed, total)
925 return ChangeTuple(added, common, removed, total)
924
926
925 def _calculate_file_changes(self, old_diff_data, new_diff_data):
927 def _calculate_file_changes(self, old_diff_data, new_diff_data):
926
928
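# index the old diff by filename, mapping each file to an md5 of its raw diff so content changes can be detected later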
927 old_files = OrderedDict()
929 old_files = OrderedDict()
928 for diff_data in old_diff_data.parsed_diff:
930 for diff_data in old_diff_data.parsed_diff:
929 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
931 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
930
932
931 added_files = []
933 added_files = []
932 modified_files = []
934 modified_files = []
933 removed_files = []
935 removed_files = []
934 for diff_data in new_diff_data.parsed_diff:
936 for diff_data in new_diff_data.parsed_diff:
935 new_filename = diff_data['filename']
937 new_filename = diff_data['filename']
936 new_hash = md5_safe(diff_data['raw_diff'])
938 new_hash = md5_safe(diff_data['raw_diff'])
937
939
938 old_hash = old_files.get(new_filename)
940 old_hash = old_files.get(new_filename)
939 if not old_hash:
941 if not old_hash:
940 # file is not present in the old diff, which means it was added
942 # file is not present in the old diff, which means it was added
941 added_files.append(new_filename)
943 added_files.append(new_filename)
942 else:
944 else:
943 if new_hash != old_hash:
945 if new_hash != old_hash:
944 modified_files.append(new_filename)
946 modified_files.append(new_filename)
945 # now remove the file from the old index, since we have seen it already
947 # now remove the file from the old index, since we have seen it already
946 del old_files[new_filename]
948 del old_files[new_filename]
947
949
948 # removed files are those present in the old diff but not in the new one;
950 # removed files are those present in the old diff but not in the new one;
949 # since we delete old entries that also appear in the new diff,
951 # since we delete old entries that also appear in the new diff,
950 # any left-overs are the removed files
952 # any left-overs are the removed files
951 removed_files.extend(old_files.keys())
953 removed_files.extend(old_files.keys())
952
954
953 return FileChangeTuple(added_files, modified_files, removed_files)
955 return FileChangeTuple(added_files, modified_files, removed_files)
954
956
955 def _render_update_message(self, changes, file_changes):
957 def _render_update_message(self, changes, file_changes):
956 """
958 """
957 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
959 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
958 so it always looks the same regardless of which default
960 so it always looks the same regardless of which default
959 renderer the system is using.
961 renderer the system is using.
960
962
961 :param changes: changes named tuple
963 :param changes: changes named tuple
962 :param file_changes: file changes named tuple
964 :param file_changes: file changes named tuple
963
965
964 """
966 """
965 new_status = ChangesetStatus.get_status_lbl(
967 new_status = ChangesetStatus.get_status_lbl(
966 ChangesetStatus.STATUS_UNDER_REVIEW)
968 ChangesetStatus.STATUS_UNDER_REVIEW)
967
969
968 changed_files = (
970 changed_files = (
969 file_changes.added + file_changes.modified + file_changes.removed)
971 file_changes.added + file_changes.modified + file_changes.removed)
970
972
971 params = {
973 params = {
972 'under_review_label': new_status,
974 'under_review_label': new_status,
973 'added_commits': changes.added,
975 'added_commits': changes.added,
974 'removed_commits': changes.removed,
976 'removed_commits': changes.removed,
975 'changed_files': changed_files,
977 'changed_files': changed_files,
976 'added_files': file_changes.added,
978 'added_files': file_changes.added,
977 'modified_files': file_changes.modified,
979 'modified_files': file_changes.modified,
978 'removed_files': file_changes.removed,
980 'removed_files': file_changes.removed,
979 }
981 }
980 renderer = RstTemplateRenderer()
982 renderer = RstTemplateRenderer()
981 return renderer.render('pull_request_update.mako', **params)
983 return renderer.render('pull_request_update.mako', **params)
982
984
983 def edit(self, pull_request, title, description, user):
985 def edit(self, pull_request, title, description, description_renderer, user):
984 pull_request = self.__get_pull_request(pull_request)
986 pull_request = self.__get_pull_request(pull_request)
985 old_data = pull_request.get_api_data(with_merge_state=False)
987 old_data = pull_request.get_api_data(with_merge_state=False)
986 if pull_request.is_closed():
988 if pull_request.is_closed():
987 raise ValueError('This pull request is closed')
989 raise ValueError('This pull request is closed')
988 if title:
990 if title:
989 pull_request.title = title
991 pull_request.title = title
990 pull_request.description = description
992 pull_request.description = description
991 pull_request.updated_on = datetime.datetime.now()
993 pull_request.updated_on = datetime.datetime.now()
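# store which renderer was used for the description, alongside the description itself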
994 pull_request.description_renderer = description_renderer
992 Session().add(pull_request)
995 Session().add(pull_request)
993 self._log_audit_action(
996 self._log_audit_action(
994 'repo.pull_request.edit', {'old_data': old_data},
997 'repo.pull_request.edit', {'old_data': old_data},
995 user, pull_request)
998 user, pull_request)
996
999
997 def update_reviewers(self, pull_request, reviewer_data, user):
1000 def update_reviewers(self, pull_request, reviewer_data, user):
998 """
1001 """
999 Update the reviewers in the pull request
1002 Update the reviewers in the pull request
1000
1003
1001 :param pull_request: the pr to update
1004 :param pull_request: the pr to update
1002 :param reviewer_data: list of tuples
1005 :param reviewer_data: list of tuples
1003 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1006 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1004 """
1007 """
1005 pull_request = self.__get_pull_request(pull_request)
1008 pull_request = self.__get_pull_request(pull_request)
1006 if pull_request.is_closed():
1009 if pull_request.is_closed():
1007 raise ValueError('This pull request is closed')
1010 raise ValueError('This pull request is closed')
1008
1011
1009 reviewers = {}
1012 reviewers = {}
1010 for user_id, reasons, mandatory, rules in reviewer_data:
1013 for user_id, reasons, mandatory, rules in reviewer_data:
1011 if isinstance(user_id, (int, basestring)):
1014 if isinstance(user_id, (int, basestring)):
1012 user_id = self._get_user(user_id).user_id
1015 user_id = self._get_user(user_id).user_id
1013 reviewers[user_id] = {
1016 reviewers[user_id] = {
1014 'reasons': reasons, 'mandatory': mandatory}
1017 'reasons': reasons, 'mandatory': mandatory}
1015
1018
1016 reviewers_ids = set(reviewers.keys())
1019 reviewers_ids = set(reviewers.keys())
1017 current_reviewers = PullRequestReviewers.query()\
1020 current_reviewers = PullRequestReviewers.query()\
1018 .filter(PullRequestReviewers.pull_request ==
1021 .filter(PullRequestReviewers.pull_request ==
1019 pull_request).all()
1022 pull_request).all()
1020 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1023 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1021
1024
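# reconcile the requested reviewer set against the current one using set differences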
1022 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1025 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1023 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1026 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1024
1027
1025 log.debug("Adding %s reviewers", ids_to_add)
1028 log.debug("Adding %s reviewers", ids_to_add)
1026 log.debug("Removing %s reviewers", ids_to_remove)
1029 log.debug("Removing %s reviewers", ids_to_remove)
1027 changed = False
1030 changed = False
1028 for uid in ids_to_add:
1031 for uid in ids_to_add:
1029 changed = True
1032 changed = True
1030 _usr = self._get_user(uid)
1033 _usr = self._get_user(uid)
1031 reviewer = PullRequestReviewers()
1034 reviewer = PullRequestReviewers()
1032 reviewer.user = _usr
1035 reviewer.user = _usr
1033 reviewer.pull_request = pull_request
1036 reviewer.pull_request = pull_request
1034 reviewer.reasons = reviewers[uid]['reasons']
1037 reviewer.reasons = reviewers[uid]['reasons']
1035 # NOTE(marcink): mandatory shouldn't be changed now
1038 # NOTE(marcink): mandatory shouldn't be changed now
1036 # reviewer.mandatory = reviewers[uid]['reasons']
1039 # reviewer.mandatory = reviewers[uid]['reasons']
1037 Session().add(reviewer)
1040 Session().add(reviewer)
1038 self._log_audit_action(
1041 self._log_audit_action(
1039 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1042 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1040 user, pull_request)
1043 user, pull_request)
1041
1044
1042 for uid in ids_to_remove:
1045 for uid in ids_to_remove:
1043 changed = True
1046 changed = True
1044 reviewers = PullRequestReviewers.query()\
1047 reviewers = PullRequestReviewers.query()\
1045 .filter(PullRequestReviewers.user_id == uid,
1048 .filter(PullRequestReviewers.user_id == uid,
1046 PullRequestReviewers.pull_request == pull_request)\
1049 PullRequestReviewers.pull_request == pull_request)\
1047 .all()
1050 .all()
1048 # use .all() in case we accidentally added the same person twice
1051 # use .all() in case we accidentally added the same person twice
1049 # this CAN happen due to the lack of DB checks
1052 # this CAN happen due to the lack of DB checks
1050 for obj in reviewers:
1053 for obj in reviewers:
1051 old_data = obj.get_dict()
1054 old_data = obj.get_dict()
1052 Session().delete(obj)
1055 Session().delete(obj)
1053 self._log_audit_action(
1056 self._log_audit_action(
1054 'repo.pull_request.reviewer.delete',
1057 'repo.pull_request.reviewer.delete',
1055 {'old_data': old_data}, user, pull_request)
1058 {'old_data': old_data}, user, pull_request)
1056
1059
1057 if changed:
1060 if changed:
1058 pull_request.updated_on = datetime.datetime.now()
1061 pull_request.updated_on = datetime.datetime.now()
1059 Session().add(pull_request)
1062 Session().add(pull_request)
1060
1063
1061 self.notify_reviewers(pull_request, ids_to_add)
1064 self.notify_reviewers(pull_request, ids_to_add)
1062 return ids_to_add, ids_to_remove
1065 return ids_to_add, ids_to_remove
1063
1066
1064 def get_url(self, pull_request, request=None, permalink=False):
1067 def get_url(self, pull_request, request=None, permalink=False):
1065 if not request:
1068 if not request:
1066 request = get_current_request()
1069 request = get_current_request()
1067
1070
1068 if permalink:
1071 if permalink:
1069 return request.route_url(
1072 return request.route_url(
1070 'pull_requests_global',
1073 'pull_requests_global',
1071 pull_request_id=pull_request.pull_request_id,)
1074 pull_request_id=pull_request.pull_request_id,)
1072 else:
1075 else:
1073 return request.route_url('pullrequest_show',
1076 return request.route_url('pullrequest_show',
1074 repo_name=safe_str(pull_request.target_repo.repo_name),
1077 repo_name=safe_str(pull_request.target_repo.repo_name),
1075 pull_request_id=pull_request.pull_request_id,)
1078 pull_request_id=pull_request.pull_request_id,)
1076
1079
1077 def get_shadow_clone_url(self, pull_request, request=None):
1080 def get_shadow_clone_url(self, pull_request, request=None):
1078 """
1081 """
1079 Returns a qualified URL pointing to the shadow repository. If this pull
1082 Returns a qualified URL pointing to the shadow repository. If this pull
1080 request is closed there is no shadow repository and ``None`` will be
1083 request is closed there is no shadow repository and ``None`` will be
1081 returned.
1084 returned.
1082 """
1085 """
1083 if pull_request.is_closed():
1086 if pull_request.is_closed():
1084 return None
1087 return None
1085 else:
1088 else:
1086 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1089 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1087 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1090 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1088
1091
1089 def notify_reviewers(self, pull_request, reviewers_ids):
1092 def notify_reviewers(self, pull_request, reviewers_ids):
1090 # notification to reviewers
1093 # notification to reviewers
1091 if not reviewers_ids:
1094 if not reviewers_ids:
1092 return
1095 return
1093
1096
1094 pull_request_obj = pull_request
1097 pull_request_obj = pull_request
1095 # get the current participants of this pull request
1098 # get the current participants of this pull request
1096 recipients = reviewers_ids
1099 recipients = reviewers_ids
1097 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1100 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1098
1101
1099 pr_source_repo = pull_request_obj.source_repo
1102 pr_source_repo = pull_request_obj.source_repo
1100 pr_target_repo = pull_request_obj.target_repo
1103 pr_target_repo = pull_request_obj.target_repo
1101
1104
1102 pr_url = h.route_url('pullrequest_show',
1105 pr_url = h.route_url('pullrequest_show',
1103 repo_name=pr_target_repo.repo_name,
1106 repo_name=pr_target_repo.repo_name,
1104 pull_request_id=pull_request_obj.pull_request_id,)
1107 pull_request_id=pull_request_obj.pull_request_id,)
1105
1108
1106 # set some variables for email notification
1109 # set some variables for email notification
1107 pr_target_repo_url = h.route_url(
1110 pr_target_repo_url = h.route_url(
1108 'repo_summary', repo_name=pr_target_repo.repo_name)
1111 'repo_summary', repo_name=pr_target_repo.repo_name)
1109
1112
1110 pr_source_repo_url = h.route_url(
1113 pr_source_repo_url = h.route_url(
1111 'repo_summary', repo_name=pr_source_repo.repo_name)
1114 'repo_summary', repo_name=pr_source_repo.repo_name)
1112
1115
1113 # pull request specifics
1116 # pull request specifics
1114 pull_request_commits = [
1117 pull_request_commits = [
1115 (x.raw_id, x.message)
1118 (x.raw_id, x.message)
1116 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1119 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1117
1120
1118 kwargs = {
1121 kwargs = {
1119 'user': pull_request.author,
1122 'user': pull_request.author,
1120 'pull_request': pull_request_obj,
1123 'pull_request': pull_request_obj,
1121 'pull_request_commits': pull_request_commits,
1124 'pull_request_commits': pull_request_commits,
1122
1125
1123 'pull_request_target_repo': pr_target_repo,
1126 'pull_request_target_repo': pr_target_repo,
1124 'pull_request_target_repo_url': pr_target_repo_url,
1127 'pull_request_target_repo_url': pr_target_repo_url,
1125
1128
1126 'pull_request_source_repo': pr_source_repo,
1129 'pull_request_source_repo': pr_source_repo,
1127 'pull_request_source_repo_url': pr_source_repo_url,
1130 'pull_request_source_repo_url': pr_source_repo_url,
1128
1131
1129 'pull_request_url': pr_url,
1132 'pull_request_url': pr_url,
1130 }
1133 }
1131
1134
1132 # pre-generate the subject for notification itself
1135 # pre-generate the subject for notification itself
1133 (subject,
1136 (subject,
1134 _h, _e, # we don't care about those
1137 _h, _e, # we don't care about those
1135 body_plaintext) = EmailNotificationModel().render_email(
1138 body_plaintext) = EmailNotificationModel().render_email(
1136 notification_type, **kwargs)
1139 notification_type, **kwargs)
1137
1140
1138 # create notification objects, and emails
1141 # create notification objects, and emails
1139 NotificationModel().create(
1142 NotificationModel().create(
1140 created_by=pull_request.author,
1143 created_by=pull_request.author,
1141 notification_subject=subject,
1144 notification_subject=subject,
1142 notification_body=body_plaintext,
1145 notification_body=body_plaintext,
1143 notification_type=notification_type,
1146 notification_type=notification_type,
1144 recipients=recipients,
1147 recipients=recipients,
1145 email_kwargs=kwargs,
1148 email_kwargs=kwargs,
1146 )
1149 )
1147
1150
1148 def delete(self, pull_request, user):
1151 def delete(self, pull_request, user):
1149 pull_request = self.__get_pull_request(pull_request)
1152 pull_request = self.__get_pull_request(pull_request)
1150 old_data = pull_request.get_api_data(with_merge_state=False)
1153 old_data = pull_request.get_api_data(with_merge_state=False)
1151 self._cleanup_merge_workspace(pull_request)
1154 self._cleanup_merge_workspace(pull_request)
1152 self._log_audit_action(
1155 self._log_audit_action(
1153 'repo.pull_request.delete', {'old_data': old_data},
1156 'repo.pull_request.delete', {'old_data': old_data},
1154 user, pull_request)
1157 user, pull_request)
1155 Session().delete(pull_request)
1158 Session().delete(pull_request)
1156
1159
1157 def close_pull_request(self, pull_request, user):
1160 def close_pull_request(self, pull_request, user):
1158 pull_request = self.__get_pull_request(pull_request)
1161 pull_request = self.__get_pull_request(pull_request)
1159 self._cleanup_merge_workspace(pull_request)
1162 self._cleanup_merge_workspace(pull_request)
1160 pull_request.status = PullRequest.STATUS_CLOSED
1163 pull_request.status = PullRequest.STATUS_CLOSED
1161 pull_request.updated_on = datetime.datetime.now()
1164 pull_request.updated_on = datetime.datetime.now()
1162 Session().add(pull_request)
1165 Session().add(pull_request)
1163 self._trigger_pull_request_hook(
1166 self._trigger_pull_request_hook(
1164 pull_request, pull_request.author, 'close')
1167 pull_request, pull_request.author, 'close')
1165
1168
1166 pr_data = pull_request.get_api_data(with_merge_state=False)
1169 pr_data = pull_request.get_api_data(with_merge_state=False)
1167 self._log_audit_action(
1170 self._log_audit_action(
1168 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1171 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1169
1172
1170 def close_pull_request_with_comment(
1173 def close_pull_request_with_comment(
1171 self, pull_request, user, repo, message=None):
1174 self, pull_request, user, repo, message=None):
1172
1175
1173 pull_request_review_status = pull_request.calculated_review_status()
1176 pull_request_review_status = pull_request.calculated_review_status()
1174
1177
1175 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1178 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1176 # approved only if we have voting consent
1179 # approved only if we have voting consent
1177 status = ChangesetStatus.STATUS_APPROVED
1180 status = ChangesetStatus.STATUS_APPROVED
1178 else:
1181 else:
1179 status = ChangesetStatus.STATUS_REJECTED
1182 status = ChangesetStatus.STATUS_REJECTED
1180 status_lbl = ChangesetStatus.get_status_lbl(status)
1183 status_lbl = ChangesetStatus.get_status_lbl(status)
1181
1184
1182 default_message = (
1185 default_message = (
1183 'Closing with status change {transition_icon} {status}.'
1186 'Closing with status change {transition_icon} {status}.'
1184 ).format(transition_icon='>', status=status_lbl)
1187 ).format(transition_icon='>', status=status_lbl)
1185 text = message or default_message
1188 text = message or default_message
1186
1189
1187 # create a comment, and link it to new status
1190 # create a comment, and link it to new status
1188 comment = CommentsModel().create(
1191 comment = CommentsModel().create(
1189 text=text,
1192 text=text,
1190 repo=repo.repo_id,
1193 repo=repo.repo_id,
1191 user=user.user_id,
1194 user=user.user_id,
1192 pull_request=pull_request.pull_request_id,
1195 pull_request=pull_request.pull_request_id,
1193 status_change=status_lbl,
1196 status_change=status_lbl,
1194 status_change_type=status,
1197 status_change_type=status,
1195 closing_pr=True
1198 closing_pr=True
1196 )
1199 )
1197
1200
1198 # calculate old status before we change it
1201 # calculate old status before we change it
1199 old_calculated_status = pull_request.calculated_review_status()
1202 old_calculated_status = pull_request.calculated_review_status()
1200 ChangesetStatusModel().set_status(
1203 ChangesetStatusModel().set_status(
1201 repo.repo_id,
1204 repo.repo_id,
1202 status,
1205 status,
1203 user.user_id,
1206 user.user_id,
1204 comment=comment,
1207 comment=comment,
1205 pull_request=pull_request.pull_request_id
1208 pull_request=pull_request.pull_request_id
1206 )
1209 )
1207
1210
1208 Session().flush()
1211 Session().flush()
1209 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1212 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1210 # we now calculate the status of the pull request again and, based on
1213 # we now calculate the status of the pull request again and, based on
1211 # that calculation, trigger a status change. This matters when a
1214 # that calculation, trigger a status change. This matters when a
1212 # non-reviewer admin closes a pr: their vote doesn't change the
1215 # non-reviewer admin closes a pr: their vote doesn't change the
1213 # status, while a reviewer closing it might.
1216 # status, while a reviewer closing it might.
1214 calculated_status = pull_request.calculated_review_status()
1217 calculated_status = pull_request.calculated_review_status()
1215 if old_calculated_status != calculated_status:
1218 if old_calculated_status != calculated_status:
1216 self._trigger_pull_request_hook(
1219 self._trigger_pull_request_hook(
1217 pull_request, user, 'review_status_change')
1220 pull_request, user, 'review_status_change')
1218
1221
1219 # finally close the PR
1222 # finally close the PR
1220 PullRequestModel().close_pull_request(
1223 PullRequestModel().close_pull_request(
1221 pull_request.pull_request_id, user)
1224 pull_request.pull_request_id, user)
1222
1225
1223 return comment, status
1226 return comment, status
1224
1227
1225 def merge_status(self, pull_request, translator=None,
1228 def merge_status(self, pull_request, translator=None,
1226 force_shadow_repo_refresh=False):
1229 force_shadow_repo_refresh=False):
1227 _ = translator or get_current_request().translate
1230 _ = translator or get_current_request().translate
1228
1231
1229 if not self._is_merge_enabled(pull_request):
1232 if not self._is_merge_enabled(pull_request):
1230 return False, _('Server-side pull request merging is disabled.')
1233 return False, _('Server-side pull request merging is disabled.')
1231 if pull_request.is_closed():
1234 if pull_request.is_closed():
1232 return False, _('This pull request is closed.')
1235 return False, _('This pull request is closed.')
1233 merge_possible, msg = self._check_repo_requirements(
1236 merge_possible, msg = self._check_repo_requirements(
1234 target=pull_request.target_repo, source=pull_request.source_repo,
1237 target=pull_request.target_repo, source=pull_request.source_repo,
1235 translator=_)
1238 translator=_)
1236 if not merge_possible:
1239 if not merge_possible:
1237 return merge_possible, msg
1240 return merge_possible, msg
1238
1241
1239 try:
1242 try:
1240 resp = self._try_merge(
1243 resp = self._try_merge(
1241 pull_request,
1244 pull_request,
1242 force_shadow_repo_refresh=force_shadow_repo_refresh)
1245 force_shadow_repo_refresh=force_shadow_repo_refresh)
1243 log.debug("Merge response: %s", resp)
1246 log.debug("Merge response: %s", resp)
1244 status = resp.possible, self.merge_status_message(
1247 status = resp.possible, self.merge_status_message(
1245 resp.failure_reason)
1248 resp.failure_reason)
1246 except NotImplementedError:
1249 except NotImplementedError:
1247 status = False, _('Pull request merging is not supported.')
1250 status = False, _('Pull request merging is not supported.')
1248
1251
1249 return status
1252 return status
1250
1253
1251 def _check_repo_requirements(self, target, source, translator):
1254 def _check_repo_requirements(self, target, source, translator):
1252 """
1255 """
1253 Check if `target` and `source` have compatible requirements.
1256 Check if `target` and `source` have compatible requirements.
1254
1257
1255 Currently this is just checking for largefiles.
1258 Currently this is just checking for largefiles.
1256 """
1259 """
1257 _ = translator
1260 _ = translator
1258 target_has_largefiles = self._has_largefiles(target)
1261 target_has_largefiles = self._has_largefiles(target)
1259 source_has_largefiles = self._has_largefiles(source)
1262 source_has_largefiles = self._has_largefiles(source)
1260 merge_possible = True
1263 merge_possible = True
1261 message = u''
1264 message = u''
1262
1265
1263 if target_has_largefiles != source_has_largefiles:
1266 if target_has_largefiles != source_has_largefiles:
1264 merge_possible = False
1267 merge_possible = False
1265 if source_has_largefiles:
1268 if source_has_largefiles:
1266 message = _(
1269 message = _(
1267 'Target repository large files support is disabled.')
1270 'Target repository large files support is disabled.')
1268 else:
1271 else:
1269 message = _(
1272 message = _(
1270 'Source repository large files support is disabled.')
1273 'Source repository large files support is disabled.')
1271
1274
1272 return merge_possible, message
1275 return merge_possible, message
1273
1276
1274 def _has_largefiles(self, repo):
1277 def _has_largefiles(self, repo):
1275 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1278 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1276 'extensions', 'largefiles')
1279 'extensions', 'largefiles')
1277 return largefiles_ui and largefiles_ui[0].active
1280 return largefiles_ui and largefiles_ui[0].active
1278
1281
1279 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1282 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1280 """
1283 """
1281 Try to merge the pull request and return the merge status.
1284 Try to merge the pull request and return the merge status.
1282 """
1285 """
1283 log.debug(
1286 log.debug(
1284 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1287 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1285 pull_request.pull_request_id, force_shadow_repo_refresh)
1288 pull_request.pull_request_id, force_shadow_repo_refresh)
1286 target_vcs = pull_request.target_repo.scm_instance()
1289 target_vcs = pull_request.target_repo.scm_instance()
1287
1290
1288 # Refresh the target reference.
1291 # Refresh the target reference.
1289 try:
1292 try:
1290 target_ref = self._refresh_reference(
1293 target_ref = self._refresh_reference(
1291 pull_request.target_ref_parts, target_vcs)
1294 pull_request.target_ref_parts, target_vcs)
1292 except CommitDoesNotExistError:
1295 except CommitDoesNotExistError:
1293 merge_state = MergeResponse(
1296 merge_state = MergeResponse(
1294 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1297 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1295 return merge_state
1298 return merge_state
1296
1299
1297 target_locked = pull_request.target_repo.locked
1300 target_locked = pull_request.target_repo.locked
1298 if target_locked and target_locked[0]:
1301 if target_locked and target_locked[0]:
1299 log.debug("The target repository is locked.")
1302 log.debug("The target repository is locked.")
1300 merge_state = MergeResponse(
1303 merge_state = MergeResponse(
1301 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1304 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1302 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1305 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1303 pull_request, target_ref):
1306 pull_request, target_ref):
1304 log.debug("Refreshing the merge status of the repository.")
1307 log.debug("Refreshing the merge status of the repository.")
1305 merge_state = self._refresh_merge_state(
1308 merge_state = self._refresh_merge_state(
1306 pull_request, target_vcs, target_ref)
1309 pull_request, target_vcs, target_ref)
1307 else:
1310 else:
1308 possible = pull_request.\
1311 possible = pull_request.\
1309 last_merge_status == MergeFailureReason.NONE
1312 last_merge_status == MergeFailureReason.NONE
1310 merge_state = MergeResponse(
1313 merge_state = MergeResponse(
1311 possible, False, None, pull_request.last_merge_status)
1314 possible, False, None, pull_request.last_merge_status)
1312
1315
1313 return merge_state
1316 return merge_state
1314
1317
1315 def _refresh_reference(self, reference, vcs_repository):
1318 def _refresh_reference(self, reference, vcs_repository):
1316 if reference.type in ('branch', 'book'):
1319 if reference.type in ('branch', 'book'):
1317 name_or_id = reference.name
1320 name_or_id = reference.name
1318 else:
1321 else:
1319 name_or_id = reference.commit_id
1322 name_or_id = reference.commit_id
1320 refreshed_commit = vcs_repository.get_commit(name_or_id)
1323 refreshed_commit = vcs_repository.get_commit(name_or_id)
1321 refreshed_reference = Reference(
1324 refreshed_reference = Reference(
1322 reference.type, reference.name, refreshed_commit.raw_id)
1325 reference.type, reference.name, refreshed_commit.raw_id)
1323 return refreshed_reference
1326 return refreshed_reference
1324
1327
1325 def _needs_merge_state_refresh(self, pull_request, target_reference):
1328 def _needs_merge_state_refresh(self, pull_request, target_reference):
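# the cached merge state stays valid only while both the source tip and the target commit are unchanged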
1326 return not(
1329 return not(
1327 pull_request.revisions and
1330 pull_request.revisions and
1328 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1331 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1329 target_reference.commit_id == pull_request._last_merge_target_rev)
1332 target_reference.commit_id == pull_request._last_merge_target_rev)
1330
1333
1331 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1334 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1332 workspace_id = self._workspace_id(pull_request)
1335 workspace_id = self._workspace_id(pull_request)
1333 source_vcs = pull_request.source_repo.scm_instance()
1336 source_vcs = pull_request.source_repo.scm_instance()
1334 repo_id = pull_request.target_repo.repo_id
1337 repo_id = pull_request.target_repo.repo_id
1335 use_rebase = self._use_rebase_for_merging(pull_request)
1338 use_rebase = self._use_rebase_for_merging(pull_request)
1336 close_branch = self._close_branch_before_merging(pull_request)
1339 close_branch = self._close_branch_before_merging(pull_request)
1337 merge_state = target_vcs.merge(
1340 merge_state = target_vcs.merge(
1338 repo_id, workspace_id,
1341 repo_id, workspace_id,
1339 target_reference, source_vcs, pull_request.source_ref_parts,
1342 target_reference, source_vcs, pull_request.source_ref_parts,
1340 dry_run=True, use_rebase=use_rebase,
1343 dry_run=True, use_rebase=use_rebase,
1341 close_branch=close_branch)
1344 close_branch=close_branch)
1342
1345
1343 # Do not store the response if there was an unknown error.
1346 # Do not store the response if there was an unknown error.
1344 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1347 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1345 pull_request._last_merge_source_rev = \
1348 pull_request._last_merge_source_rev = \
1346 pull_request.source_ref_parts.commit_id
1349 pull_request.source_ref_parts.commit_id
1347 pull_request._last_merge_target_rev = target_reference.commit_id
1350 pull_request._last_merge_target_rev = target_reference.commit_id
1348 pull_request.last_merge_status = merge_state.failure_reason
1351 pull_request.last_merge_status = merge_state.failure_reason
1349 pull_request.shadow_merge_ref = merge_state.merge_ref
1352 pull_request.shadow_merge_ref = merge_state.merge_ref
1350 Session().add(pull_request)
1353 Session().add(pull_request)
1351 Session().commit()
1354 Session().commit()
1352
1355
1353 return merge_state
1356 return merge_state
1354
1357
1355 def _workspace_id(self, pull_request):
1358 def _workspace_id(self, pull_request):
1356 workspace_id = 'pr-%s' % pull_request.pull_request_id
1359 workspace_id = 'pr-%s' % pull_request.pull_request_id
1357 return workspace_id
1360 return workspace_id
1358
1361
1359 def merge_status_message(self, status_code):
1362 def merge_status_message(self, status_code):
1360 """
1363 """
1361 Return a human friendly error message for the given merge status code.
1364 Return a human friendly error message for the given merge status code.
1362 """
1365 """
1363 return self.MERGE_STATUS_MESSAGES[status_code]
1366 return self.MERGE_STATUS_MESSAGES[status_code]
1364
1367
1365 def generate_repo_data(self, repo, commit_id=None, branch=None,
1368 def generate_repo_data(self, repo, commit_id=None, branch=None,
1366 bookmark=None, translator=None):
1369 bookmark=None, translator=None):
1367 from rhodecode.model.repo import RepoModel
1370 from rhodecode.model.repo import RepoModel
1368
1371
1369 all_refs, selected_ref = \
1372 all_refs, selected_ref = \
1370 self._get_repo_pullrequest_sources(
1373 self._get_repo_pullrequest_sources(
1371 repo.scm_instance(), commit_id=commit_id,
1374 repo.scm_instance(), commit_id=commit_id,
1372 branch=branch, bookmark=bookmark, translator=translator)
1375 branch=branch, bookmark=bookmark, translator=translator)
1373
1376
1374 refs_select2 = []
1377 refs_select2 = []
1375 for element in all_refs:
1378 for element in all_refs:
1376 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1379 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1377 refs_select2.append({'text': element[1], 'children': children})
1380 refs_select2.append({'text': element[1], 'children': children})
1378
1381
1379 return {
1382 return {
1380 'user': {
1383 'user': {
1381 'user_id': repo.user.user_id,
1384 'user_id': repo.user.user_id,
1382 'username': repo.user.username,
1385 'username': repo.user.username,
1383 'firstname': repo.user.first_name,
1386 'firstname': repo.user.first_name,
1384 'lastname': repo.user.last_name,
1387 'lastname': repo.user.last_name,
1385 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1388 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1386 },
1389 },
1387 'name': repo.repo_name,
1390 'name': repo.repo_name,
1388 'link': RepoModel().get_url(repo),
1391 'link': RepoModel().get_url(repo),
1389 'description': h.chop_at_smart(repo.description_safe, '\n'),
1392 'description': h.chop_at_smart(repo.description_safe, '\n'),
1390 'refs': {
1393 'refs': {
1391 'all_refs': all_refs,
1394 'all_refs': all_refs,
1392 'selected_ref': selected_ref,
1395 'selected_ref': selected_ref,
1393 'select2_refs': refs_select2
1396 'select2_refs': refs_select2
1394 }
1397 }
1395 }
1398 }
1396
1399
1397 def generate_pullrequest_title(self, source, source_ref, target):
1400 def generate_pullrequest_title(self, source, source_ref, target):
1398 return u'{source}#{at_ref} to {target}'.format(
1401 return u'{source}#{at_ref} to {target}'.format(
1399 source=source,
1402 source=source,
1400 at_ref=source_ref,
1403 at_ref=source_ref,
1401 target=target,
1404 target=target,
1402 )
1405 )
1403
1406
1404 def _cleanup_merge_workspace(self, pull_request):
1407 def _cleanup_merge_workspace(self, pull_request):
1405 # Merging related cleanup
1408 # Merging related cleanup
1406 repo_id = pull_request.target_repo.repo_id
1409 repo_id = pull_request.target_repo.repo_id
1407 target_scm = pull_request.target_repo.scm_instance()
1410 target_scm = pull_request.target_repo.scm_instance()
1408 workspace_id = self._workspace_id(pull_request)
1411 workspace_id = self._workspace_id(pull_request)
1409
1412
1410 try:
1413 try:
1411 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1414 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1412 except NotImplementedError:
1415 except NotImplementedError:
1413 pass
1416 pass
1414
1417
1415 def _get_repo_pullrequest_sources(
1418 def _get_repo_pullrequest_sources(
1416 self, repo, commit_id=None, branch=None, bookmark=None,
1419 self, repo, commit_id=None, branch=None, bookmark=None,
1417 translator=None):
1420 translator=None):
1418 """
1421 """
1419 Return a structure with the repo's interesting commits, suitable for
1422 Return a structure with the repo's interesting commits, suitable for
1420 the selectors in the pullrequest controller
1423 the selectors in the pullrequest controller
1421
1424
1422 :param commit_id: a commit that must be in the list somehow
1425 :param commit_id: a commit that must be in the list somehow
1423 and selected by default
1426 and selected by default
1424 :param branch: a branch that must be in the list and selected
1427 :param branch: a branch that must be in the list and selected
1425 by default - even if closed
1428 by default - even if closed
1426 :param bookmark: a bookmark that must be in the list and selected
1429 :param bookmark: a bookmark that must be in the list and selected
1427 """
1430 """
1428 _ = translator or get_current_request().translate
1431 _ = translator or get_current_request().translate
1429
1432
1430 commit_id = safe_str(commit_id) if commit_id else None
1433 commit_id = safe_str(commit_id) if commit_id else None
1431 branch = safe_str(branch) if branch else None
1434 branch = safe_str(branch) if branch else None
1432 bookmark = safe_str(bookmark) if bookmark else None
1435 bookmark = safe_str(bookmark) if bookmark else None
1433
1436
1434 selected = None
1437 selected = None
1435
1438
1436 # order matters: first source that has commit_id in it will be selected
1439 # order matters: first source that has commit_id in it will be selected
1437 sources = []
1440 sources = []
1438 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1441 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1439 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1442 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1440
1443
1441 if commit_id:
1444 if commit_id:
1442 ref_commit = (h.short_id(commit_id), commit_id)
1445 ref_commit = (h.short_id(commit_id), commit_id)
1443 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1446 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1444
1447
1445 sources.append(
1448 sources.append(
1446 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1449 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1447 )
1450 )
1448
1451
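# build the grouped ref list; the first ref matching commit_id or the requested name becomes the pre-selected entry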
1449 groups = []
1452 groups = []
1450 for group_key, ref_list, group_name, match in sources:
1453 for group_key, ref_list, group_name, match in sources:
1451 group_refs = []
1454 group_refs = []
1452 for ref_name, ref_id in ref_list:
1455 for ref_name, ref_id in ref_list:
1453 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1456 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1454 group_refs.append((ref_key, ref_name))
1457 group_refs.append((ref_key, ref_name))
1455
1458
1456 if not selected:
1459 if not selected:
1457 if set([commit_id, match]) & set([ref_id, ref_name]):
1460 if set([commit_id, match]) & set([ref_id, ref_name]):
1458 selected = ref_key
1461 selected = ref_key
1459
1462
1460 if group_refs:
1463 if group_refs:
1461 groups.append((group_refs, group_name))
1464 groups.append((group_refs, group_name))
1462
1465
1463 if not selected:
1466 if not selected:
1464 ref = commit_id or branch or bookmark
1467 ref = commit_id or branch or bookmark
1465 if ref:
1468 if ref:
1466 raise CommitDoesNotExistError(
1469 raise CommitDoesNotExistError(
1467 'No commit refs could be found matching: %s' % ref)
1470 'No commit refs could be found matching: %s' % ref)
1468 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1471 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1469 selected = 'branch:%s:%s' % (
1472 selected = 'branch:%s:%s' % (
1470 repo.DEFAULT_BRANCH_NAME,
1473 repo.DEFAULT_BRANCH_NAME,
1471 repo.branches[repo.DEFAULT_BRANCH_NAME]
1474 repo.branches[repo.DEFAULT_BRANCH_NAME]
1472 )
1475 )
1473 elif repo.commit_ids:
1476 elif repo.commit_ids:
1474 # make the user select in this case
1477 # make the user select in this case
1475 selected = None
1478 selected = None
1476 else:
1479 else:
1477 raise EmptyRepositoryError()
1480 raise EmptyRepositoryError()
1478 return groups, selected
1481 return groups, selected
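
For illustration only, the grouped choices built above are keyed as 'group_key:ref_name:ref_id'; a minimal sketch of pulling such a selection apart again (the selected value below is invented, not taken from the diff):

    # hypothetical selected value in the 'type:name:commit_id' format built above
    selected = 'branch:default:1a2b3c4d5e6f7890'
    ref_type, ref_name, ref_commit = selected.split(':', 2)
    assert ref_type in ('book', 'branch', 'rev')   # the group keys used in `sources`
    print(ref_name, ref_commit)                    # -> default 1a2b3c4d5e6f7890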

    def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
        return self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=context)

    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id, context):
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id))
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit, context=context)
        return vcs_diff

    def _is_merge_enabled(self, pull_request):
        return self._get_general_setting(
            pull_request, 'rhodecode_pr_merge_enabled')

    def _use_rebase_for_merging(self, pull_request):
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_use_rebase_for_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_use_rebase_for_merging')

        return False

    def _close_branch_before_merging(self, pull_request):
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_close_branch_before_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_close_branch_before_merging')

        return False

    def _get_general_setting(self, pull_request, settings_key, default=False):
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)
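
The helpers above all funnel through the per-repository "general" VCS settings; a hedged sketch of how the flags might be read together (it assumes a `pull_request` object already in scope, and is illustrative rather than code from this diff):

    # illustrative only: consult the settings-backed flags defined above
    model = PullRequestModel()
    if model._is_merge_enabled(pull_request):
        use_rebase = model._use_rebase_for_merging(pull_request)          # hg/git specific key
        close_branch = model._close_branch_before_merging(pull_request)
        log.debug('merge flags: rebase=%s close_branch=%s', use_rebase, close_branch)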

    def _log_audit_action(self, action, action_data, user, pull_request):
        audit_logger.store(
            action=action,
            action_data=action_data,
            user=user,
            repo=pull_request.target_repo)

    def get_reviewer_functions(self):
        """
        Fetches functions for validation and fetching default reviewers.
        If available we use the EE package, else we fallback to CE
        package functions
        """
        try:
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
        except ImportError:
            from rhodecode.apps.repository.utils import \
                get_default_reviewers_data
            from rhodecode.apps.repository.utils import \
                validate_default_reviewers

        return get_default_reviewers_data, validate_default_reviewers
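
The EE-first import with a CE fallback above is a plain try/except ImportError; the same idiom in a generic, runnable form (the modules below are unrelated to RhodeCode and only illustrate the pattern):

    # prefer an optional package, fall back to the stdlib if it is missing
    try:
        import ujson as json_impl      # optional accelerator, may not be installed
    except ImportError:
        import json as json_impl       # always-available fallback
    print(json_impl.dumps({'fallback_pattern': True}))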


class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        _ = translator
        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('User `{}` not allowed to perform merge.').format(user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error(
                'warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved todos left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug(
                "MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check
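
A hedged sketch of how a caller might consume the check object assembled above; the `pull_request`, `current_user` and `request` names are placeholders for objects a view would already hold, not part of this diff:

    # illustrative only: run the merge checks and inspect the result
    merge_check = MergeCheck.validate(
        pull_request, user=current_user, translator=request.translate)
    if merge_check.failed:
        for error_type, message in merge_check.errors:
            log.warning('merge blocked (%s): %s', error_type, message)
    can_merge = merge_check.merge_possible and not merge_check.failed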

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
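
For reference, the dictionary returned above has roughly this shape; the keys come from the code, while the message values depend on the repository's settings and are shown here only as examples:

    merge_details = {
        'merge_strategy': {'details': {}, 'message': u'Merge strategy: rebase'},
        'close_branch': {'details': {}, 'message': u'Source branch will be closed after merge.'},
    }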

ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,550 +1,551 b''
// # Copyright (C) 2010-2018 RhodeCode GmbH
// #
// # This program is free software: you can redistribute it and/or modify
// # it under the terms of the GNU Affero General Public License, version 3
// # (only), as published by the Free Software Foundation.
// #
// # This program is distributed in the hope that it will be useful,
// # but WITHOUT ANY WARRANTY; without even the implied warranty of
// # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// # GNU General Public License for more details.
// #
// # You should have received a copy of the GNU Affero General Public License
// # along with this program. If not, see <http://www.gnu.org/licenses/>.
// #
// # This program is dual-licensed. If you wish to learn more about the
// # RhodeCode Enterprise Edition, including its added features, Support services,
// # and proprietary license terms, please see https://rhodecode.com/licenses/


var prButtonLockChecks = {
    'compare': false,
    'reviewers': false
};

/**
 * lock button until all checks and loads are made. E.g reviewer calculation
 * should prevent from submitting a PR
 * @param lockEnabled
 * @param msg
 * @param scope
 */
var prButtonLock = function(lockEnabled, msg, scope) {
    scope = scope || 'all';
    if (scope == 'all'){
        prButtonLockChecks['compare'] = !lockEnabled;
        prButtonLockChecks['reviewers'] = !lockEnabled;
    } else if (scope == 'compare') {
        prButtonLockChecks['compare'] = !lockEnabled;
    } else if (scope == 'reviewers'){
        prButtonLockChecks['reviewers'] = !lockEnabled;
    }
    var checksMeet = prButtonLockChecks.compare && prButtonLockChecks.reviewers;
    if (lockEnabled) {
        $('#pr_submit').attr('disabled', 'disabled');
    }
    else if (checksMeet) {
        $('#pr_submit').removeAttr('disabled');
    }

    if (msg) {
        $('#pr_open_message').html(msg);
    }
};


/**
 Generate Title and Description for a PullRequest.
 In case of 1 commits, the title and description is that one commit
 in case of multiple commits, we iterate on them with max N number of commits,
 and build description in a form
 - commitN
 - commitN+1
 ...

 Title is then constructed from branch names, or other references,
 replacing '-' and '_' into spaces

 * @param sourceRef
 * @param elements
 * @param limit
 * @returns {*[]}
 */
var getTitleAndDescription = function(sourceRef, elements, limit) {
    var title = '';
    var desc = '';

    $.each($(elements).get().reverse().slice(0, limit), function(idx, value) {
        var rawMessage = $(value).find('td.td-description .message').data('messageRaw');
        desc += '- ' + rawMessage.split('\n')[0].replace(/\n+$/, "") + '\n';
    });
    // only 1 commit, use commit message as title
    if (elements.length === 1) {
        title = $(elements[0]).find('td.td-description .message').data('messageRaw').split('\n')[0];
    }
    else {
        // use reference name
        title = sourceRef.replace(/-/g, ' ').replace(/_/g, ' ').capitalizeFirstLetter();
    }

    return [title, desc]
};
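
The title fallback above only prettifies the source reference name; the same transformation expressed as a small Python sketch (the branch name is invented):

    # mirror of the JS fallback: derive a pull request title from a ref name
    source_ref = 'feature/add-description_renderer'
    title = source_ref.replace('-', ' ').replace('_', ' ')
    title = title[:1].upper() + title[1:]
    print(title)   # -> Feature/add description renderer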



ReviewersController = function () {
    var self = this;
    this.$reviewRulesContainer = $('#review_rules');
    this.$rulesList = this.$reviewRulesContainer.find('.pr-reviewer-rules');
    this.forbidReviewUsers = undefined;
    this.$reviewMembers = $('#review_members');
    this.currentRequest = null;

    this.defaultForbidReviewUsers = function() {
        return [
            {'username': 'default',
             'user_id': templateContext.default_user.user_id}
        ];
    };

    this.hideReviewRules = function() {
        self.$reviewRulesContainer.hide();
    };

    this.showReviewRules = function() {
        self.$reviewRulesContainer.show();
    };

    this.addRule = function(ruleText) {
        self.showReviewRules();
        return '<div>- {0}</div>'.format(ruleText)
    };

    this.loadReviewRules = function(data) {
        // reset forbidden Users
        this.forbidReviewUsers = self.defaultForbidReviewUsers();

        // reset state of review rules
        self.$rulesList.html('');

        if (!data || data.rules === undefined || $.isEmptyObject(data.rules)) {
            // default rule, case for older repo that don't have any rules stored
            self.$rulesList.append(
                self.addRule(
                    _gettext('All reviewers must vote.'))
            );
            return self.forbidReviewUsers
        }

        if (data.rules.voting !== undefined) {
            if (data.rules.voting < 0) {
                self.$rulesList.append(
                    self.addRule(
                        _gettext('All individual reviewers must vote.'))
                )
            } else if (data.rules.voting === 1) {
                self.$rulesList.append(
                    self.addRule(
                        _gettext('At least {0} reviewer must vote.').format(data.rules.voting))
                )

            } else {
                self.$rulesList.append(
                    self.addRule(
                        _gettext('At least {0} reviewers must vote.').format(data.rules.voting))
                )
            }
        }

        if (data.rules.voting_groups !== undefined) {
            $.each(data.rules.voting_groups, function(index, rule_data) {
                self.$rulesList.append(
                    self.addRule(rule_data.text)
                )
            });
        }

        if (data.rules.use_code_authors_for_review) {
            self.$rulesList.append(
                self.addRule(
                    _gettext('Reviewers picked from source code changes.'))
            )
        }
        if (data.rules.forbid_adding_reviewers) {
            $('#add_reviewer_input').remove();
            self.$rulesList.append(
                self.addRule(
                    _gettext('Adding new reviewers is forbidden.'))
            )
        }
        if (data.rules.forbid_author_to_review) {
            self.forbidReviewUsers.push(data.rules_data.pr_author);
            self.$rulesList.append(
                self.addRule(
                    _gettext('Author is not allowed to be a reviewer.'))
            )
        }
        if (data.rules.forbid_commit_author_to_review) {

            if (data.rules_data.forbidden_users) {
                $.each(data.rules_data.forbidden_users, function(index, member_data) {
                    self.forbidReviewUsers.push(member_data)
                });

            }

            self.$rulesList.append(
                self.addRule(
                    _gettext('Commit Authors are not allowed to be a reviewer.'))
            )
        }

        return self.forbidReviewUsers
    };

    this.loadDefaultReviewers = function(sourceRepo, sourceRef, targetRepo, targetRef) {

        if (self.currentRequest) {
            // make sure we cleanup old running requests before triggering this
            // again
            self.currentRequest.abort();
        }

        $('.calculate-reviewers').show();
        // reset reviewer members
        self.$reviewMembers.empty();

        prButtonLock(true, null, 'reviewers');
        $('#user').hide(); // hide user autocomplete before load

        if (sourceRef.length !== 3 || targetRef.length !== 3) {
            // don't load defaults in case we're missing some refs...
            $('.calculate-reviewers').hide();
            return
        }

        var url = pyroutes.url('repo_default_reviewers_data',
            {
                'repo_name': templateContext.repo_name,
                'source_repo': sourceRepo,
                'source_ref': sourceRef[2],
                'target_repo': targetRepo,
                'target_ref': targetRef[2]
            });

        self.currentRequest = $.get(url)
            .done(function(data) {
                self.currentRequest = null;

                // review rules
                self.loadReviewRules(data);

                for (var i = 0; i < data.reviewers.length; i++) {
                    var reviewer = data.reviewers[i];
                    self.addReviewMember(
                        reviewer, reviewer.reasons, reviewer.mandatory);
                }
                $('.calculate-reviewers').hide();
                prButtonLock(false, null, 'reviewers');
                $('#user').show(); // show user autocomplete after load
            });
    };

    // check those, refactor
    this.removeReviewMember = function(reviewer_id, mark_delete) {
        var reviewer = $('#reviewer_{0}'.format(reviewer_id));

        if(typeof(mark_delete) === undefined){
            mark_delete = false;
        }

        if(mark_delete === true){
            if (reviewer){
                // now delete the input
                $('#reviewer_{0} input'.format(reviewer_id)).remove();
                // mark as to-delete
                var obj = $('#reviewer_{0}_name'.format(reviewer_id));
                obj.addClass('to-delete');
                obj.css({"text-decoration":"line-through", "opacity": 0.5});
            }
        }
        else{
            $('#reviewer_{0}'.format(reviewer_id)).remove();
        }
    };
    this.reviewMemberEntry = function() {

    };
    this.addReviewMember = function(reviewer_obj, reasons, mandatory) {
        var members = self.$reviewMembers.get(0);
        var id = reviewer_obj.user_id;
        var username = reviewer_obj.username;

        var reasons = reasons || [];
        var mandatory = mandatory || false;

        // register IDS to check if we don't have this ID already in
        var currentIds = [];
        var _els = self.$reviewMembers.find('li').toArray();
        for (el in _els){
            currentIds.push(_els[el].id)
        }

        var userAllowedReview = function(userId) {
            var allowed = true;
            $.each(self.forbidReviewUsers, function(index, member_data) {
                if (parseInt(userId) === member_data['user_id']) {
                    allowed = false;
                    return false // breaks the loop
                }
            });
            return allowed
        };

        var userAllowed = userAllowedReview(id);
        if (!userAllowed){
            alert(_gettext('User `{0}` not allowed to be a reviewer').format(username));
        } else {
            // only add if it's not there
            var alreadyReviewer = currentIds.indexOf('reviewer_'+id) != -1;

            if (alreadyReviewer) {
                alert(_gettext('User `{0}` already in reviewers').format(username));
            } else {
                members.innerHTML += renderTemplate('reviewMemberEntry', {
                    'member': reviewer_obj,
                    'mandatory': mandatory,
                    'allowed_to_update': true,
                    'review_status': 'not_reviewed',
                    'review_status_label': _gettext('Not Reviewed'),
                    'reasons': reasons,
                    'create': true
                });
            }
        }

    };

    this.updateReviewers = function(repo_name, pull_request_id){
        var postData = $('#reviewers input').serialize();
        _updatePullRequest(repo_name, pull_request_id, postData);
    };

};


var _updatePullRequest = function(repo_name, pull_request_id, postData) {
    var url = pyroutes.url(
        'pullrequest_update',
        {"repo_name": repo_name, "pull_request_id": pull_request_id});
    if (typeof postData === 'string' ) {
        postData += '&csrf_token=' + CSRF_TOKEN;
    } else {
        postData.csrf_token = CSRF_TOKEN;
    }
    var success = function(o) {
        window.location.reload();
    };
    ajaxPOST(url, postData, success);
};

/**
 * PULL REQUEST update commits
 */
var updateCommits = function(repo_name, pull_request_id) {
    var postData = {
        'update_commits': true};
    _updatePullRequest(repo_name, pull_request_id, postData);
};


/**
 * PULL REQUEST edit info
 */
-var editPullRequest = function(repo_name, pull_request_id, title, description) {
+var editPullRequest = function(repo_name, pull_request_id, title, description, renderer) {
    var url = pyroutes.url(
        'pullrequest_update',
        {"repo_name": repo_name, "pull_request_id": pull_request_id});

    var postData = {
        'title': title,
        'description': description,
+       'description_renderer': renderer,
        'edit_pull_request': true,
        'csrf_token': CSRF_TOKEN
    };
    var success = function(o) {
        window.location.reload();
    };
    ajaxPOST(url, postData, success);
};
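
On the wire, the edit call above now carries the renderer alongside the title and description; a hedged Python sketch of an equivalent form POST (the URL, token and field values are placeholders; only the field names come from the code above):

    # illustrative only: same payload shape the JS builds for 'pullrequest_update'
    import requests

    payload = {
        'title': 'Fix login redirect',
        'description': 'Updated description text',
        'description_renderer': 'markdown',       # the field added by this change
        'edit_pull_request': True,
        'csrf_token': '<csrf token from the page>',
    }
    requests.post('https://code.example.com/myrepo/pull-request/7/update', data=payload)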


/**
 * Reviewer autocomplete
 */
var ReviewerAutoComplete = function(inputId) {
    $(inputId).autocomplete({
        serviceUrl: pyroutes.url('user_autocomplete_data'),
        minChars:2,
        maxHeight:400,
        deferRequestBy: 300, //miliseconds
        showNoSuggestionNotice: true,
        tabDisabled: true,
        autoSelectFirst: true,
        params: { user_id: templateContext.rhodecode_user.user_id, user_groups:true, user_groups_expand:true, skip_default_user:true },
        formatResult: autocompleteFormatResult,
        lookupFilter: autocompleteFilterResult,
        onSelect: function(element, data) {
            var mandatory = false;
            var reasons = [_gettext('added manually by "{0}"').format(templateContext.rhodecode_user.username)];

            // add whole user groups
            if (data.value_type == 'user_group') {
                reasons.push(_gettext('member of "{0}"').format(data.value_display));

                $.each(data.members, function(index, member_data) {
                    var reviewer = member_data;
                    reviewer['user_id'] = member_data['id'];
                    reviewer['gravatar_link'] = member_data['icon_link'];
                    reviewer['user_link'] = member_data['profile_link'];
                    reviewer['rules'] = [];
                    reviewersController.addReviewMember(reviewer, reasons, mandatory);
                })
            }
            // add single user
            else {
                var reviewer = data;
                reviewer['user_id'] = data['id'];
                reviewer['gravatar_link'] = data['icon_link'];
                reviewer['user_link'] = data['profile_link'];
                reviewer['rules'] = [];
                reviewersController.addReviewMember(reviewer, reasons, mandatory);
            }

            $(inputId).val('');
        }
    });
};


VersionController = function () {
    var self = this;
    this.$verSource = $('input[name=ver_source]');
    this.$verTarget = $('input[name=ver_target]');
    this.$showVersionDiff = $('#show-version-diff');

    this.adjustRadioSelectors = function (curNode) {
        var getVal = function (item) {
            if (item == 'latest') {
                return Number.MAX_SAFE_INTEGER
            }
            else {
                return parseInt(item)
            }
        };

        var curVal = getVal($(curNode).val());
        var cleared = false;

        $.each(self.$verSource, function (index, value) {
            var elVal = getVal($(value).val());

            if (elVal > curVal) {
                if ($(value).is(':checked')) {
                    cleared = true;
                }
                $(value).attr('disabled', 'disabled');
                $(value).removeAttr('checked');
                $(value).css({'opacity': 0.1});
            }
            else {
                $(value).css({'opacity': 1});
                $(value).removeAttr('disabled');
            }
        });

        if (cleared) {
            // if we unchecked an active, set the next one to same loc.
            $(this.$verSource).filter('[value={0}]'.format(
                curVal)).attr('checked', 'checked');
        }

        self.setLockAction(false,
            $(curNode).data('verPos'),
            $(this.$verSource).filter(':checked').data('verPos')
        );
    };


    this.attachVersionListener = function () {
        self.$verTarget.change(function (e) {
            self.adjustRadioSelectors(this)
        });
        self.$verSource.change(function (e) {
            self.adjustRadioSelectors(self.$verTarget.filter(':checked'))
        });
    };

    this.init = function () {

        var curNode = self.$verTarget.filter(':checked');
        self.adjustRadioSelectors(curNode);
        self.setLockAction(true);
        self.attachVersionListener();

    };

    this.setLockAction = function (state, selectedVersion, otherVersion) {
        var $showVersionDiff = this.$showVersionDiff;

        if (state) {
            $showVersionDiff.attr('disabled', 'disabled');
            $showVersionDiff.addClass('disabled');
            $showVersionDiff.html($showVersionDiff.data('labelTextLocked'));
        }
        else {
            $showVersionDiff.removeAttr('disabled');
            $showVersionDiff.removeClass('disabled');

            if (selectedVersion == otherVersion) {
                $showVersionDiff.html($showVersionDiff.data('labelTextShow'));
            } else {
                $showVersionDiff.html($showVersionDiff.data('labelTextDiff'));
            }
        }

    };

    this.showVersionDiff = function () {
        var target = self.$verTarget.filter(':checked');
        var source = self.$verSource.filter(':checked');

        if (target.val() && source.val()) {
            var params = {
                'pull_request_id': templateContext.pull_request_data.pull_request_id,
                'repo_name': templateContext.repo_name,
                'version': target.val(),
                'from_version': source.val()
            };
            window.location = pyroutes.url('pullrequest_show', params)
        }

        return false;
    };

    this.toggleVersionView = function (elem) {

        if (this.$showVersionDiff.is(':visible')) {
            $('.version-pr').hide();
            this.$showVersionDiff.hide();
            $(elem).html($(elem).data('toggleOn'))
        } else {
            $('.version-pr').show();
            this.$showVersionDiff.show();
            $(elem).html($(elem).data('toggleOff'))
        }

        return false
    }

};
(two further modified files omitted: content truncated in this view)