##// END OF EJS Templates
pull-requests: fixed a case for Git repositories where, when a merge check failed due to merge conflicts, the pull request wrongly reported missing commits.
marcink -
r4299:04e45b92 default
parent child Browse files
Show More
@@ -0,0 +1,52 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4 from sqlalchemy import *
5
6 from alembic.migration import MigrationContext
7 from alembic.operations import Operations
8 from sqlalchemy import BigInteger
9
10 from rhodecode.lib.dbmigrate.versions import _reset_base
11 from rhodecode.model import init_model_encryption
12
13
14 log = logging.getLogger(__name__)
15
16
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata

    Adds the nullable ``last_merge_metadata`` JSON column to both the
    ``pull_requests`` and ``pull_request_versions`` tables so merge-check
    results (e.g. conflict details) can be persisted per pull request.
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_18_0_1 as db

    init_model_encryption(db)

    context = MigrationContext.configure(migrate_engine.connect())
    op = Operations(context)

    # Both tables receive exactly the same column definition, so add it
    # through a shared helper instead of duplicating the DDL.
    for table in (db.PullRequest.__table__, db.PullRequestVersion.__table__):
        _add_last_merge_metadata_column(op, db, table)


def _add_last_merge_metadata_column(op, db, table):
    """Add the ``last_merge_metadata`` JSON column to ``table``.

    MySQL needs an explicit sized text type for the JSON payload, hence the
    ``UnicodeText(16384)`` dialect mapping.
    """
    with op.batch_alter_table(table.name) as batch_op:
        new_column = Column(
            'last_merge_metadata',
            db.JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
        batch_op.add_column(new_column)
44
45
def downgrade(migrate_engine):
    # Intentionally a no-op: the columns added by upgrade() are left in
    # place. Binding a fresh MetaData to the engine without further
    # operations appears to follow the convention of this project's other
    # migration scripts — presumably to keep downgrades non-destructive.
    meta = MetaData()
    meta.bind = migrate_engine
49
50
def fixups(models, _SESSION):
    """No-op hook: this migration requires no post-upgrade data fixups."""
@@ -1,57 +1,57 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import sys
22 import sys
23 import platform
23 import platform
24
24
25 VERSION = tuple(open(os.path.join(
25 VERSION = tuple(open(os.path.join(
26 os.path.dirname(__file__), 'VERSION')).read().split('.'))
26 os.path.dirname(__file__), 'VERSION')).read().split('.'))
27
27
28 BACKENDS = {
28 BACKENDS = {
29 'hg': 'Mercurial repository',
29 'hg': 'Mercurial repository',
30 'git': 'Git repository',
30 'git': 'Git repository',
31 'svn': 'Subversion repository',
31 'svn': 'Subversion repository',
32 }
32 }
33
33
34 CELERY_ENABLED = False
34 CELERY_ENABLED = False
35 CELERY_EAGER = False
35 CELERY_EAGER = False
36
36
37 # link to config for pyramid
37 # link to config for pyramid
38 CONFIG = {}
38 CONFIG = {}
39
39
40 # Populated with the settings dictionary from application init in
40 # Populated with the settings dictionary from application init in
41 # rhodecode.conf.environment.load_pyramid_environment
41 # rhodecode.conf.environment.load_pyramid_environment
42 PYRAMID_SETTINGS = {}
42 PYRAMID_SETTINGS = {}
43
43
44 # Linked module for extensions
44 # Linked module for extensions
45 EXTENSIONS = {}
45 EXTENSIONS = {}
46
46
47 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
47 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
48 __dbversion__ = 103 # defines current db version for migrations
48 __dbversion__ = 104 # defines current db version for migrations
49 __platform__ = platform.system()
49 __platform__ = platform.system()
50 __license__ = 'AGPLv3, and Commercial License'
50 __license__ = 'AGPLv3, and Commercial License'
51 __author__ = 'RhodeCode GmbH'
51 __author__ = 'RhodeCode GmbH'
52 __url__ = 'https://code.rhodecode.com'
52 __url__ = 'https://code.rhodecode.com'
53
53
54 is_windows = __platform__ in ['Windows']
54 is_windows = __platform__ in ['Windows']
55 is_unix = not is_windows
55 is_unix = not is_windows
56 is_test = False
56 is_test = False
57 disable_error_handler = False
57 disable_error_handler = False
@@ -1,1215 +1,1217 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.model.changeset_status import ChangesetStatusModel
27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
31 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
32 from rhodecode.model.user import UserModel
33 from rhodecode.tests import (
33 from rhodecode.tests import (
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35
35
36
36
37 def route_path(name, params=None, **kwargs):
37 def route_path(name, params=None, **kwargs):
38 import urllib
38 import urllib
39
39
40 base_url = {
40 base_url = {
41 'repo_changelog': '/{repo_name}/changelog',
41 'repo_changelog': '/{repo_name}/changelog',
42 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
42 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 'repo_commits': '/{repo_name}/commits',
43 'repo_commits': '/{repo_name}/commits',
44 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
44 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
45 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
46 'pullrequest_show_all': '/{repo_name}/pull-request',
46 'pullrequest_show_all': '/{repo_name}/pull-request',
47 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
48 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
49 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
49 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
50 'pullrequest_new': '/{repo_name}/pull-request/new',
50 'pullrequest_new': '/{repo_name}/pull-request/new',
51 'pullrequest_create': '/{repo_name}/pull-request/create',
51 'pullrequest_create': '/{repo_name}/pull-request/create',
52 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
53 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
54 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
55 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
56 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
57 }[name].format(**kwargs)
57 }[name].format(**kwargs)
58
58
59 if params:
59 if params:
60 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
61 return base_url
61 return base_url
62
62
63
63
64 @pytest.mark.usefixtures('app', 'autologin_user')
64 @pytest.mark.usefixtures('app', 'autologin_user')
65 @pytest.mark.backends("git", "hg")
65 @pytest.mark.backends("git", "hg")
66 class TestPullrequestsView(object):
66 class TestPullrequestsView(object):
67
67
68 def test_index(self, backend):
68 def test_index(self, backend):
69 self.app.get(route_path(
69 self.app.get(route_path(
70 'pullrequest_new',
70 'pullrequest_new',
71 repo_name=backend.repo_name))
71 repo_name=backend.repo_name))
72
72
73 def test_option_menu_create_pull_request_exists(self, backend):
73 def test_option_menu_create_pull_request_exists(self, backend):
74 repo_name = backend.repo_name
74 repo_name = backend.repo_name
75 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
76
76
77 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
78 'pullrequest_new', repo_name=repo_name)
78 'pullrequest_new', repo_name=repo_name)
79 response.mustcontain(create_pr_link)
79 response.mustcontain(create_pr_link)
80
80
81 def test_create_pr_form_with_raw_commit_id(self, backend):
81 def test_create_pr_form_with_raw_commit_id(self, backend):
82 repo = backend.repo
82 repo = backend.repo
83
83
84 self.app.get(
84 self.app.get(
85 route_path('pullrequest_new', repo_name=repo.repo_name,
85 route_path('pullrequest_new', repo_name=repo.repo_name,
86 commit=repo.get_commit().raw_id),
86 commit=repo.get_commit().raw_id),
87 status=200)
87 status=200)
88
88
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 @pytest.mark.parametrize('range_diff', ["0", "1"])
90 @pytest.mark.parametrize('range_diff', ["0", "1"])
91 def test_show(self, pr_util, pr_merge_enabled, range_diff):
91 def test_show(self, pr_util, pr_merge_enabled, range_diff):
92 pull_request = pr_util.create_pull_request(
92 pull_request = pr_util.create_pull_request(
93 mergeable=pr_merge_enabled, enable_notifications=False)
93 mergeable=pr_merge_enabled, enable_notifications=False)
94
94
95 response = self.app.get(route_path(
95 response = self.app.get(route_path(
96 'pullrequest_show',
96 'pullrequest_show',
97 repo_name=pull_request.target_repo.scm_instance().name,
97 repo_name=pull_request.target_repo.scm_instance().name,
98 pull_request_id=pull_request.pull_request_id,
98 pull_request_id=pull_request.pull_request_id,
99 params={'range-diff': range_diff}))
99 params={'range-diff': range_diff}))
100
100
101 for commit_id in pull_request.revisions:
101 for commit_id in pull_request.revisions:
102 response.mustcontain(commit_id)
102 response.mustcontain(commit_id)
103
103
104 response.mustcontain(pull_request.target_ref_parts.type)
104 response.mustcontain(pull_request.target_ref_parts.type)
105 response.mustcontain(pull_request.target_ref_parts.name)
105 response.mustcontain(pull_request.target_ref_parts.name)
106
106
107 response.mustcontain('class="pull-request-merge"')
107 response.mustcontain('class="pull-request-merge"')
108
108
109 if pr_merge_enabled:
109 if pr_merge_enabled:
110 response.mustcontain('Pull request reviewer approval is pending')
110 response.mustcontain('Pull request reviewer approval is pending')
111 else:
111 else:
112 response.mustcontain('Server-side pull request merging is disabled.')
112 response.mustcontain('Server-side pull request merging is disabled.')
113
113
114 if range_diff == "1":
114 if range_diff == "1":
115 response.mustcontain('Turn off: Show the diff as commit range')
115 response.mustcontain('Turn off: Show the diff as commit range')
116
116
117 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
117 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
118 # Logout
118 # Logout
119 response = self.app.post(
119 response = self.app.post(
120 h.route_path('logout'),
120 h.route_path('logout'),
121 params={'csrf_token': csrf_token})
121 params={'csrf_token': csrf_token})
122 # Login as regular user
122 # Login as regular user
123 response = self.app.post(h.route_path('login'),
123 response = self.app.post(h.route_path('login'),
124 {'username': TEST_USER_REGULAR_LOGIN,
124 {'username': TEST_USER_REGULAR_LOGIN,
125 'password': 'test12'})
125 'password': 'test12'})
126
126
127 pull_request = pr_util.create_pull_request(
127 pull_request = pr_util.create_pull_request(
128 author=TEST_USER_REGULAR_LOGIN)
128 author=TEST_USER_REGULAR_LOGIN)
129
129
130 response = self.app.get(route_path(
130 response = self.app.get(route_path(
131 'pullrequest_show',
131 'pullrequest_show',
132 repo_name=pull_request.target_repo.scm_instance().name,
132 repo_name=pull_request.target_repo.scm_instance().name,
133 pull_request_id=pull_request.pull_request_id))
133 pull_request_id=pull_request.pull_request_id))
134
134
135 response.mustcontain('Server-side pull request merging is disabled.')
135 response.mustcontain('Server-side pull request merging is disabled.')
136
136
137 assert_response = response.assert_response()
137 assert_response = response.assert_response()
138 # for regular user without a merge permissions, we don't see it
138 # for regular user without a merge permissions, we don't see it
139 assert_response.no_element_exists('#close-pull-request-action')
139 assert_response.no_element_exists('#close-pull-request-action')
140
140
141 user_util.grant_user_permission_to_repo(
141 user_util.grant_user_permission_to_repo(
142 pull_request.target_repo,
142 pull_request.target_repo,
143 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
143 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
144 'repository.write')
144 'repository.write')
145 response = self.app.get(route_path(
145 response = self.app.get(route_path(
146 'pullrequest_show',
146 'pullrequest_show',
147 repo_name=pull_request.target_repo.scm_instance().name,
147 repo_name=pull_request.target_repo.scm_instance().name,
148 pull_request_id=pull_request.pull_request_id))
148 pull_request_id=pull_request.pull_request_id))
149
149
150 response.mustcontain('Server-side pull request merging is disabled.')
150 response.mustcontain('Server-side pull request merging is disabled.')
151
151
152 assert_response = response.assert_response()
152 assert_response = response.assert_response()
153 # now regular user has a merge permissions, we have CLOSE button
153 # now regular user has a merge permissions, we have CLOSE button
154 assert_response.one_element_exists('#close-pull-request-action')
154 assert_response.one_element_exists('#close-pull-request-action')
155
155
156 def test_show_invalid_commit_id(self, pr_util):
156 def test_show_invalid_commit_id(self, pr_util):
157 # Simulating invalid revisions which will cause a lookup error
157 # Simulating invalid revisions which will cause a lookup error
158 pull_request = pr_util.create_pull_request()
158 pull_request = pr_util.create_pull_request()
159 pull_request.revisions = ['invalid']
159 pull_request.revisions = ['invalid']
160 Session().add(pull_request)
160 Session().add(pull_request)
161 Session().commit()
161 Session().commit()
162
162
163 response = self.app.get(route_path(
163 response = self.app.get(route_path(
164 'pullrequest_show',
164 'pullrequest_show',
165 repo_name=pull_request.target_repo.scm_instance().name,
165 repo_name=pull_request.target_repo.scm_instance().name,
166 pull_request_id=pull_request.pull_request_id))
166 pull_request_id=pull_request.pull_request_id))
167
167
168 for commit_id in pull_request.revisions:
168 for commit_id in pull_request.revisions:
169 response.mustcontain(commit_id)
169 response.mustcontain(commit_id)
170
170
171 def test_show_invalid_source_reference(self, pr_util):
171 def test_show_invalid_source_reference(self, pr_util):
172 pull_request = pr_util.create_pull_request()
172 pull_request = pr_util.create_pull_request()
173 pull_request.source_ref = 'branch:b:invalid'
173 pull_request.source_ref = 'branch:b:invalid'
174 Session().add(pull_request)
174 Session().add(pull_request)
175 Session().commit()
175 Session().commit()
176
176
177 self.app.get(route_path(
177 self.app.get(route_path(
178 'pullrequest_show',
178 'pullrequest_show',
179 repo_name=pull_request.target_repo.scm_instance().name,
179 repo_name=pull_request.target_repo.scm_instance().name,
180 pull_request_id=pull_request.pull_request_id))
180 pull_request_id=pull_request.pull_request_id))
181
181
182 def test_edit_title_description(self, pr_util, csrf_token):
182 def test_edit_title_description(self, pr_util, csrf_token):
183 pull_request = pr_util.create_pull_request()
183 pull_request = pr_util.create_pull_request()
184 pull_request_id = pull_request.pull_request_id
184 pull_request_id = pull_request.pull_request_id
185
185
186 response = self.app.post(
186 response = self.app.post(
187 route_path('pullrequest_update',
187 route_path('pullrequest_update',
188 repo_name=pull_request.target_repo.repo_name,
188 repo_name=pull_request.target_repo.repo_name,
189 pull_request_id=pull_request_id),
189 pull_request_id=pull_request_id),
190 params={
190 params={
191 'edit_pull_request': 'true',
191 'edit_pull_request': 'true',
192 'title': 'New title',
192 'title': 'New title',
193 'description': 'New description',
193 'description': 'New description',
194 'csrf_token': csrf_token})
194 'csrf_token': csrf_token})
195
195
196 assert_session_flash(
196 assert_session_flash(
197 response, u'Pull request title & description updated.',
197 response, u'Pull request title & description updated.',
198 category='success')
198 category='success')
199
199
200 pull_request = PullRequest.get(pull_request_id)
200 pull_request = PullRequest.get(pull_request_id)
201 assert pull_request.title == 'New title'
201 assert pull_request.title == 'New title'
202 assert pull_request.description == 'New description'
202 assert pull_request.description == 'New description'
203
203
204 def test_edit_title_description_closed(self, pr_util, csrf_token):
204 def test_edit_title_description_closed(self, pr_util, csrf_token):
205 pull_request = pr_util.create_pull_request()
205 pull_request = pr_util.create_pull_request()
206 pull_request_id = pull_request.pull_request_id
206 pull_request_id = pull_request.pull_request_id
207 repo_name = pull_request.target_repo.repo_name
207 repo_name = pull_request.target_repo.repo_name
208 pr_util.close()
208 pr_util.close()
209
209
210 response = self.app.post(
210 response = self.app.post(
211 route_path('pullrequest_update',
211 route_path('pullrequest_update',
212 repo_name=repo_name, pull_request_id=pull_request_id),
212 repo_name=repo_name, pull_request_id=pull_request_id),
213 params={
213 params={
214 'edit_pull_request': 'true',
214 'edit_pull_request': 'true',
215 'title': 'New title',
215 'title': 'New title',
216 'description': 'New description',
216 'description': 'New description',
217 'csrf_token': csrf_token}, status=200)
217 'csrf_token': csrf_token}, status=200)
218 assert_session_flash(
218 assert_session_flash(
219 response, u'Cannot update closed pull requests.',
219 response, u'Cannot update closed pull requests.',
220 category='error')
220 category='error')
221
221
222 def test_update_invalid_source_reference(self, pr_util, csrf_token):
222 def test_update_invalid_source_reference(self, pr_util, csrf_token):
223 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
223 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
224
224
225 pull_request = pr_util.create_pull_request()
225 pull_request = pr_util.create_pull_request()
226 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
226 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
227 Session().add(pull_request)
227 Session().add(pull_request)
228 Session().commit()
228 Session().commit()
229
229
230 pull_request_id = pull_request.pull_request_id
230 pull_request_id = pull_request.pull_request_id
231
231
232 response = self.app.post(
232 response = self.app.post(
233 route_path('pullrequest_update',
233 route_path('pullrequest_update',
234 repo_name=pull_request.target_repo.repo_name,
234 repo_name=pull_request.target_repo.repo_name,
235 pull_request_id=pull_request_id),
235 pull_request_id=pull_request_id),
236 params={'update_commits': 'true', 'csrf_token': csrf_token})
236 params={'update_commits': 'true', 'csrf_token': csrf_token})
237
237
238 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
238 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
239 UpdateFailureReason.MISSING_SOURCE_REF])
239 UpdateFailureReason.MISSING_SOURCE_REF])
240 assert_session_flash(response, expected_msg, category='error')
240 assert_session_flash(response, expected_msg, category='error')
241
241
242 def test_missing_target_reference(self, pr_util, csrf_token):
242 def test_missing_target_reference(self, pr_util, csrf_token):
243 from rhodecode.lib.vcs.backends.base import MergeFailureReason
243 from rhodecode.lib.vcs.backends.base import MergeFailureReason
244 pull_request = pr_util.create_pull_request(
244 pull_request = pr_util.create_pull_request(
245 approved=True, mergeable=True)
245 approved=True, mergeable=True)
246 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
246 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
247 pull_request.target_ref = unicode_reference
247 pull_request.target_ref = unicode_reference
248 Session().add(pull_request)
248 Session().add(pull_request)
249 Session().commit()
249 Session().commit()
250
250
251 pull_request_id = pull_request.pull_request_id
251 pull_request_id = pull_request.pull_request_id
252 pull_request_url = route_path(
252 pull_request_url = route_path(
253 'pullrequest_show',
253 'pullrequest_show',
254 repo_name=pull_request.target_repo.repo_name,
254 repo_name=pull_request.target_repo.repo_name,
255 pull_request_id=pull_request_id)
255 pull_request_id=pull_request_id)
256
256
257 response = self.app.get(pull_request_url)
257 response = self.app.get(pull_request_url)
258 target_ref_id = 'invalid-branch'
258 target_ref_id = 'invalid-branch'
259 merge_resp = MergeResponse(
259 merge_resp = MergeResponse(
260 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
260 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
261 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
261 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
262 response.assert_response().element_contains(
262 response.assert_response().element_contains(
263 'div[data-role="merge-message"]', merge_resp.merge_status_message)
263 'div[data-role="merge-message"]', merge_resp.merge_status_message)
264
264
265 def test_comment_and_close_pull_request_custom_message_approved(
265 def test_comment_and_close_pull_request_custom_message_approved(
266 self, pr_util, csrf_token, xhr_header):
266 self, pr_util, csrf_token, xhr_header):
267
267
268 pull_request = pr_util.create_pull_request(approved=True)
268 pull_request = pr_util.create_pull_request(approved=True)
269 pull_request_id = pull_request.pull_request_id
269 pull_request_id = pull_request.pull_request_id
270 author = pull_request.user_id
270 author = pull_request.user_id
271 repo = pull_request.target_repo.repo_id
271 repo = pull_request.target_repo.repo_id
272
272
273 self.app.post(
273 self.app.post(
274 route_path('pullrequest_comment_create',
274 route_path('pullrequest_comment_create',
275 repo_name=pull_request.target_repo.scm_instance().name,
275 repo_name=pull_request.target_repo.scm_instance().name,
276 pull_request_id=pull_request_id),
276 pull_request_id=pull_request_id),
277 params={
277 params={
278 'close_pull_request': '1',
278 'close_pull_request': '1',
279 'text': 'Closing a PR',
279 'text': 'Closing a PR',
280 'csrf_token': csrf_token},
280 'csrf_token': csrf_token},
281 extra_environ=xhr_header,)
281 extra_environ=xhr_header,)
282
282
283 journal = UserLog.query()\
283 journal = UserLog.query()\
284 .filter(UserLog.user_id == author)\
284 .filter(UserLog.user_id == author)\
285 .filter(UserLog.repository_id == repo) \
285 .filter(UserLog.repository_id == repo) \
286 .order_by(UserLog.user_log_id.asc()) \
286 .order_by(UserLog.user_log_id.asc()) \
287 .all()
287 .all()
288 assert journal[-1].action == 'repo.pull_request.close'
288 assert journal[-1].action == 'repo.pull_request.close'
289
289
290 pull_request = PullRequest.get(pull_request_id)
290 pull_request = PullRequest.get(pull_request_id)
291 assert pull_request.is_closed()
291 assert pull_request.is_closed()
292
292
293 status = ChangesetStatusModel().get_status(
293 status = ChangesetStatusModel().get_status(
294 pull_request.source_repo, pull_request=pull_request)
294 pull_request.source_repo, pull_request=pull_request)
295 assert status == ChangesetStatus.STATUS_APPROVED
295 assert status == ChangesetStatus.STATUS_APPROVED
296 comments = ChangesetComment().query() \
296 comments = ChangesetComment().query() \
297 .filter(ChangesetComment.pull_request == pull_request) \
297 .filter(ChangesetComment.pull_request == pull_request) \
298 .order_by(ChangesetComment.comment_id.asc())\
298 .order_by(ChangesetComment.comment_id.asc())\
299 .all()
299 .all()
300 assert comments[-1].text == 'Closing a PR'
300 assert comments[-1].text == 'Closing a PR'
301
301
302 def test_comment_force_close_pull_request_rejected(
302 def test_comment_force_close_pull_request_rejected(
303 self, pr_util, csrf_token, xhr_header):
303 self, pr_util, csrf_token, xhr_header):
304 pull_request = pr_util.create_pull_request()
304 pull_request = pr_util.create_pull_request()
305 pull_request_id = pull_request.pull_request_id
305 pull_request_id = pull_request.pull_request_id
306 PullRequestModel().update_reviewers(
306 PullRequestModel().update_reviewers(
307 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
307 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
308 pull_request.author)
308 pull_request.author)
309 author = pull_request.user_id
309 author = pull_request.user_id
310 repo = pull_request.target_repo.repo_id
310 repo = pull_request.target_repo.repo_id
311
311
312 self.app.post(
312 self.app.post(
313 route_path('pullrequest_comment_create',
313 route_path('pullrequest_comment_create',
314 repo_name=pull_request.target_repo.scm_instance().name,
314 repo_name=pull_request.target_repo.scm_instance().name,
315 pull_request_id=pull_request_id),
315 pull_request_id=pull_request_id),
316 params={
316 params={
317 'close_pull_request': '1',
317 'close_pull_request': '1',
318 'csrf_token': csrf_token},
318 'csrf_token': csrf_token},
319 extra_environ=xhr_header)
319 extra_environ=xhr_header)
320
320
321 pull_request = PullRequest.get(pull_request_id)
321 pull_request = PullRequest.get(pull_request_id)
322
322
323 journal = UserLog.query()\
323 journal = UserLog.query()\
324 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
324 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
325 .order_by(UserLog.user_log_id.asc()) \
325 .order_by(UserLog.user_log_id.asc()) \
326 .all()
326 .all()
327 assert journal[-1].action == 'repo.pull_request.close'
327 assert journal[-1].action == 'repo.pull_request.close'
328
328
329 # check only the latest status, not the review status
329 # check only the latest status, not the review status
330 status = ChangesetStatusModel().get_status(
330 status = ChangesetStatusModel().get_status(
331 pull_request.source_repo, pull_request=pull_request)
331 pull_request.source_repo, pull_request=pull_request)
332 assert status == ChangesetStatus.STATUS_REJECTED
332 assert status == ChangesetStatus.STATUS_REJECTED
333
333
334 def test_comment_and_close_pull_request(
334 def test_comment_and_close_pull_request(
335 self, pr_util, csrf_token, xhr_header):
335 self, pr_util, csrf_token, xhr_header):
336 pull_request = pr_util.create_pull_request()
336 pull_request = pr_util.create_pull_request()
337 pull_request_id = pull_request.pull_request_id
337 pull_request_id = pull_request.pull_request_id
338
338
339 response = self.app.post(
339 response = self.app.post(
340 route_path('pullrequest_comment_create',
340 route_path('pullrequest_comment_create',
341 repo_name=pull_request.target_repo.scm_instance().name,
341 repo_name=pull_request.target_repo.scm_instance().name,
342 pull_request_id=pull_request.pull_request_id),
342 pull_request_id=pull_request.pull_request_id),
343 params={
343 params={
344 'close_pull_request': 'true',
344 'close_pull_request': 'true',
345 'csrf_token': csrf_token},
345 'csrf_token': csrf_token},
346 extra_environ=xhr_header)
346 extra_environ=xhr_header)
347
347
348 assert response.json
348 assert response.json
349
349
350 pull_request = PullRequest.get(pull_request_id)
350 pull_request = PullRequest.get(pull_request_id)
351 assert pull_request.is_closed()
351 assert pull_request.is_closed()
352
352
353 # check only the latest status, not the review status
353 # check only the latest status, not the review status
354 status = ChangesetStatusModel().get_status(
354 status = ChangesetStatusModel().get_status(
355 pull_request.source_repo, pull_request=pull_request)
355 pull_request.source_repo, pull_request=pull_request)
356 assert status == ChangesetStatus.STATUS_REJECTED
356 assert status == ChangesetStatus.STATUS_REJECTED
357
357
    def test_create_pull_request(self, backend, csrf_token):
        """Submit the create-PR form and verify the stored revisions and refs."""
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change2'])

        # NOTE: the __start__/__end__ pairs mirror colander's sequence and
        # mapping markers; their order is part of the form protocol and must
        # not be rearranged.
        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change2']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('revisions', commit_ids['change2']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        # a successful create redirects to /<id>; 'new' would mean failure
        location = response.headers['Location']
        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # check that we have now both revisions
        assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
        assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref
409
409
    def test_reviewer_notifications(self, backend, csrf_token):
        """Creating a PR and later changing reviewers each create a notification."""
        # We have to use the app.post for this test so it will create the
        # notifications properly with the new PR
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        # NOTE: the __start__/__end__ pairs mirror colander's sequence and
        # mapping markers; their order is part of the form protocol.
        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '2'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # Check that a notification was made
        notifications = Notification.query()\
            .filter(Notification.created_by == pull_request.author.user_id,
                    Notification.type_ == Notification.TYPE_PULL_REQUEST,
                    Notification.subject.contains(
                        "requested a pull request review. !%s" % pull_request_id))
        assert len(notifications.all()) == 1

        # Change reviewers and check that a notification was made
        PullRequestModel().update_reviewers(
            pull_request.pull_request_id, [(1, [], False, [])],
            pull_request.author)
        assert len(notifications.all()) == 2
477
477
    def test_create_pull_request_stores_ancestor_commit_id(self, backend,
                                                           csrf_token):
        """The PR's target_ref must be pinned to the common ancestor commit."""
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        # NOTE: the __start__/__end__ pairs mirror colander's sequence and
        # mapping markers; their order is part of the form protocol.
        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # target_ref has to point to the ancestor's commit_id in order to
        # show the correct diff
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref

        # Check generated diff contents
        response = response.follow()
        response.mustcontain(no=['content_of_ancestor'])
        response.mustcontain(no=['content_of_ancestor-child'])
        response.mustcontain('content_of_change')
541
541
542 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
542 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
543 # Clear any previous calls to rcextensions
543 # Clear any previous calls to rcextensions
544 rhodecode.EXTENSIONS.calls.clear()
544 rhodecode.EXTENSIONS.calls.clear()
545
545
546 pull_request = pr_util.create_pull_request(
546 pull_request = pr_util.create_pull_request(
547 approved=True, mergeable=True)
547 approved=True, mergeable=True)
548 pull_request_id = pull_request.pull_request_id
548 pull_request_id = pull_request.pull_request_id
549 repo_name = pull_request.target_repo.scm_instance().name,
549 repo_name = pull_request.target_repo.scm_instance().name,
550
550
551 url = route_path('pullrequest_merge',
551 url = route_path('pullrequest_merge',
552 repo_name=str(repo_name[0]),
552 repo_name=str(repo_name[0]),
553 pull_request_id=pull_request_id)
553 pull_request_id=pull_request_id)
554 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
554 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
555
555
556 pull_request = PullRequest.get(pull_request_id)
556 pull_request = PullRequest.get(pull_request_id)
557
557
558 assert response.status_int == 200
558 assert response.status_int == 200
559 assert pull_request.is_closed()
559 assert pull_request.is_closed()
560 assert_pull_request_status(
560 assert_pull_request_status(
561 pull_request, ChangesetStatus.STATUS_APPROVED)
561 pull_request, ChangesetStatus.STATUS_APPROVED)
562
562
563 # Check the relevant log entries were added
563 # Check the relevant log entries were added
564 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
564 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
565 actions = [log.action for log in user_logs]
565 actions = [log.action for log in user_logs]
566 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
566 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
567 expected_actions = [
567 expected_actions = [
568 u'repo.pull_request.close',
568 u'repo.pull_request.close',
569 u'repo.pull_request.merge',
569 u'repo.pull_request.merge',
570 u'repo.pull_request.comment.create'
570 u'repo.pull_request.comment.create'
571 ]
571 ]
572 assert actions == expected_actions
572 assert actions == expected_actions
573
573
574 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
574 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
575 actions = [log for log in user_logs]
575 actions = [log for log in user_logs]
576 assert actions[-1].action == 'user.push'
576 assert actions[-1].action == 'user.push'
577 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
577 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
578
578
579 # Check post_push rcextension was really executed
579 # Check post_push rcextension was really executed
580 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
580 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
581 assert len(push_calls) == 1
581 assert len(push_calls) == 1
582 unused_last_call_args, last_call_kwargs = push_calls[0]
582 unused_last_call_args, last_call_kwargs = push_calls[0]
583 assert last_call_kwargs['action'] == 'push'
583 assert last_call_kwargs['action'] == 'push'
584 assert last_call_kwargs['commit_ids'] == pr_commit_ids
584 assert last_call_kwargs['commit_ids'] == pr_commit_ids
585
585
586 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
586 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
587 pull_request = pr_util.create_pull_request(mergeable=False)
587 pull_request = pr_util.create_pull_request(mergeable=False)
588 pull_request_id = pull_request.pull_request_id
588 pull_request_id = pull_request.pull_request_id
589 pull_request = PullRequest.get(pull_request_id)
589 pull_request = PullRequest.get(pull_request_id)
590
590
591 response = self.app.post(
591 response = self.app.post(
592 route_path('pullrequest_merge',
592 route_path('pullrequest_merge',
593 repo_name=pull_request.target_repo.scm_instance().name,
593 repo_name=pull_request.target_repo.scm_instance().name,
594 pull_request_id=pull_request.pull_request_id),
594 pull_request_id=pull_request.pull_request_id),
595 params={'csrf_token': csrf_token}).follow()
595 params={'csrf_token': csrf_token}).follow()
596
596
597 assert response.status_int == 200
597 assert response.status_int == 200
598 response.mustcontain(
598 response.mustcontain(
599 'Merge is not currently possible because of below failed checks.')
599 'Merge is not currently possible because of below failed checks.')
600 response.mustcontain('Server-side pull request merging is disabled.')
600 response.mustcontain('Server-side pull request merging is disabled.')
601
601
602 @pytest.mark.skip_backends('svn')
602 @pytest.mark.skip_backends('svn')
603 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
603 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
604 pull_request = pr_util.create_pull_request(mergeable=True)
604 pull_request = pr_util.create_pull_request(mergeable=True)
605 pull_request_id = pull_request.pull_request_id
605 pull_request_id = pull_request.pull_request_id
606 repo_name = pull_request.target_repo.scm_instance().name
606 repo_name = pull_request.target_repo.scm_instance().name
607
607
608 response = self.app.post(
608 response = self.app.post(
609 route_path('pullrequest_merge',
609 route_path('pullrequest_merge',
610 repo_name=repo_name, pull_request_id=pull_request_id),
610 repo_name=repo_name, pull_request_id=pull_request_id),
611 params={'csrf_token': csrf_token}).follow()
611 params={'csrf_token': csrf_token}).follow()
612
612
613 assert response.status_int == 200
613 assert response.status_int == 200
614
614
615 response.mustcontain(
615 response.mustcontain(
616 'Merge is not currently possible because of below failed checks.')
616 'Merge is not currently possible because of below failed checks.')
617 response.mustcontain('Pull request reviewer approval is pending.')
617 response.mustcontain('Pull request reviewer approval is pending.')
618
618
619 def test_merge_pull_request_renders_failure_reason(
619 def test_merge_pull_request_renders_failure_reason(
620 self, user_regular, csrf_token, pr_util):
620 self, user_regular, csrf_token, pr_util):
621 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
621 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
622 pull_request_id = pull_request.pull_request_id
622 pull_request_id = pull_request.pull_request_id
623 repo_name = pull_request.target_repo.scm_instance().name
623 repo_name = pull_request.target_repo.scm_instance().name
624
624
625 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
625 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
626 MergeFailureReason.PUSH_FAILED,
626 MergeFailureReason.PUSH_FAILED,
627 metadata={'target': 'shadow repo',
627 metadata={'target': 'shadow repo',
628 'merge_commit': 'xxx'})
628 'merge_commit': 'xxx'})
629 model_patcher = mock.patch.multiple(
629 model_patcher = mock.patch.multiple(
630 PullRequestModel,
630 PullRequestModel,
631 merge_repo=mock.Mock(return_value=merge_resp),
631 merge_repo=mock.Mock(return_value=merge_resp),
632 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
632 merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))
633
633
634 with model_patcher:
634 with model_patcher:
635 response = self.app.post(
635 response = self.app.post(
636 route_path('pullrequest_merge',
636 route_path('pullrequest_merge',
637 repo_name=repo_name,
637 repo_name=repo_name,
638 pull_request_id=pull_request_id),
638 pull_request_id=pull_request_id),
639 params={'csrf_token': csrf_token}, status=302)
639 params={'csrf_token': csrf_token}, status=302)
640
640
641 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
641 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
642 metadata={'target': 'shadow repo',
642 metadata={'target': 'shadow repo',
643 'merge_commit': 'xxx'})
643 'merge_commit': 'xxx'})
644 assert_session_flash(response, merge_resp.merge_status_message)
644 assert_session_flash(response, merge_resp.merge_status_message)
645
645
646 def test_update_source_revision(self, backend, csrf_token):
646 def test_update_source_revision(self, backend, csrf_token):
647 commits = [
647 commits = [
648 {'message': 'ancestor'},
648 {'message': 'ancestor'},
649 {'message': 'change'},
649 {'message': 'change'},
650 {'message': 'change-2'},
650 {'message': 'change-2'},
651 ]
651 ]
652 commit_ids = backend.create_master_repo(commits)
652 commit_ids = backend.create_master_repo(commits)
653 target = backend.create_repo(heads=['ancestor'])
653 target = backend.create_repo(heads=['ancestor'])
654 source = backend.create_repo(heads=['change'])
654 source = backend.create_repo(heads=['change'])
655
655
656 # create pr from a in source to A in target
656 # create pr from a in source to A in target
657 pull_request = PullRequest()
657 pull_request = PullRequest()
658
658
659 pull_request.source_repo = source
659 pull_request.source_repo = source
660 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
660 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
661 branch=backend.default_branch_name, commit_id=commit_ids['change'])
661 branch=backend.default_branch_name, commit_id=commit_ids['change'])
662
662
663 pull_request.target_repo = target
663 pull_request.target_repo = target
664 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
664 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
665 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
665 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
666
666
667 pull_request.revisions = [commit_ids['change']]
667 pull_request.revisions = [commit_ids['change']]
668 pull_request.title = u"Test"
668 pull_request.title = u"Test"
669 pull_request.description = u"Description"
669 pull_request.description = u"Description"
670 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
670 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
671 pull_request.pull_request_state = PullRequest.STATE_CREATED
671 pull_request.pull_request_state = PullRequest.STATE_CREATED
672 Session().add(pull_request)
672 Session().add(pull_request)
673 Session().commit()
673 Session().commit()
674 pull_request_id = pull_request.pull_request_id
674 pull_request_id = pull_request.pull_request_id
675
675
676 # source has ancestor - change - change-2
676 # source has ancestor - change - change-2
677 backend.pull_heads(source, heads=['change-2'])
677 backend.pull_heads(source, heads=['change-2'])
678
678
679 # update PR
679 # update PR
680 self.app.post(
680 self.app.post(
681 route_path('pullrequest_update',
681 route_path('pullrequest_update',
682 repo_name=target.repo_name, pull_request_id=pull_request_id),
682 repo_name=target.repo_name, pull_request_id=pull_request_id),
683 params={'update_commits': 'true', 'csrf_token': csrf_token})
683 params={'update_commits': 'true', 'csrf_token': csrf_token})
684
684
685 response = self.app.get(
685 response = self.app.get(
686 route_path('pullrequest_show',
686 route_path('pullrequest_show',
687 repo_name=target.repo_name,
687 repo_name=target.repo_name,
688 pull_request_id=pull_request.pull_request_id))
688 pull_request_id=pull_request.pull_request_id))
689
689
690 assert response.status_int == 200
690 assert response.status_int == 200
691 response.mustcontain('Pull request updated to')
691 response.mustcontain('Pull request updated to')
692 response.mustcontain('with 1 added, 0 removed commits.')
692 response.mustcontain('with 1 added, 0 removed commits.')
693
693
694 # check that we have now both revisions
694 # check that we have now both revisions
695 pull_request = PullRequest.get(pull_request_id)
695 pull_request = PullRequest.get(pull_request_id)
696 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
696 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
697
697
698 def test_update_target_revision(self, backend, csrf_token):
698 def test_update_target_revision(self, backend, csrf_token):
699 commits = [
699 commits = [
700 {'message': 'ancestor'},
700 {'message': 'ancestor'},
701 {'message': 'change'},
701 {'message': 'change'},
702 {'message': 'ancestor-new', 'parents': ['ancestor']},
702 {'message': 'ancestor-new', 'parents': ['ancestor']},
703 {'message': 'change-rebased'},
703 {'message': 'change-rebased'},
704 ]
704 ]
705 commit_ids = backend.create_master_repo(commits)
705 commit_ids = backend.create_master_repo(commits)
706 target = backend.create_repo(heads=['ancestor'])
706 target = backend.create_repo(heads=['ancestor'])
707 source = backend.create_repo(heads=['change'])
707 source = backend.create_repo(heads=['change'])
708
708
709 # create pr from a in source to A in target
709 # create pr from a in source to A in target
710 pull_request = PullRequest()
710 pull_request = PullRequest()
711
711
712 pull_request.source_repo = source
712 pull_request.source_repo = source
713 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
713 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
714 branch=backend.default_branch_name, commit_id=commit_ids['change'])
714 branch=backend.default_branch_name, commit_id=commit_ids['change'])
715
715
716 pull_request.target_repo = target
716 pull_request.target_repo = target
717 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
717 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
718 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
718 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
719
719
720 pull_request.revisions = [commit_ids['change']]
720 pull_request.revisions = [commit_ids['change']]
721 pull_request.title = u"Test"
721 pull_request.title = u"Test"
722 pull_request.description = u"Description"
722 pull_request.description = u"Description"
723 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
723 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
724 pull_request.pull_request_state = PullRequest.STATE_CREATED
724 pull_request.pull_request_state = PullRequest.STATE_CREATED
725
725
726 Session().add(pull_request)
726 Session().add(pull_request)
727 Session().commit()
727 Session().commit()
728 pull_request_id = pull_request.pull_request_id
728 pull_request_id = pull_request.pull_request_id
729
729
730 # target has ancestor - ancestor-new
730 # target has ancestor - ancestor-new
731 # source has ancestor - ancestor-new - change-rebased
731 # source has ancestor - ancestor-new - change-rebased
732 backend.pull_heads(target, heads=['ancestor-new'])
732 backend.pull_heads(target, heads=['ancestor-new'])
733 backend.pull_heads(source, heads=['change-rebased'])
733 backend.pull_heads(source, heads=['change-rebased'])
734
734
735 # update PR
735 # update PR
736 url = route_path('pullrequest_update',
736 url = route_path('pullrequest_update',
737 repo_name=target.repo_name,
737 repo_name=target.repo_name,
738 pull_request_id=pull_request_id)
738 pull_request_id=pull_request_id)
739 self.app.post(url,
739 self.app.post(url,
740 params={'update_commits': 'true', 'csrf_token': csrf_token},
740 params={'update_commits': 'true', 'csrf_token': csrf_token},
741 status=200)
741 status=200)
742
742
743 # check that we have now both revisions
743 # check that we have now both revisions
744 pull_request = PullRequest.get(pull_request_id)
744 pull_request = PullRequest.get(pull_request_id)
745 assert pull_request.revisions == [commit_ids['change-rebased']]
745 assert pull_request.revisions == [commit_ids['change-rebased']]
746 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
746 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
747 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
747 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
748
748
749 response = self.app.get(
749 response = self.app.get(
750 route_path('pullrequest_show',
750 route_path('pullrequest_show',
751 repo_name=target.repo_name,
751 repo_name=target.repo_name,
752 pull_request_id=pull_request.pull_request_id))
752 pull_request_id=pull_request.pull_request_id))
753 assert response.status_int == 200
753 assert response.status_int == 200
754 response.mustcontain('Pull request updated to')
754 response.mustcontain('Pull request updated to')
755 response.mustcontain('with 1 added, 1 removed commits.')
755 response.mustcontain('with 1 added, 1 removed commits.')
756
756
757 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
757 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
758 backend = backend_git
758 backend = backend_git
759 commits = [
759 commits = [
760 {'message': 'master-commit-1'},
760 {'message': 'master-commit-1'},
761 {'message': 'master-commit-2-change-1'},
761 {'message': 'master-commit-2-change-1'},
762 {'message': 'master-commit-3-change-2'},
762 {'message': 'master-commit-3-change-2'},
763
763
764 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
764 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
765 {'message': 'feat-commit-2'},
765 {'message': 'feat-commit-2'},
766 ]
766 ]
767 commit_ids = backend.create_master_repo(commits)
767 commit_ids = backend.create_master_repo(commits)
768 target = backend.create_repo(heads=['master-commit-3-change-2'])
768 target = backend.create_repo(heads=['master-commit-3-change-2'])
769 source = backend.create_repo(heads=['feat-commit-2'])
769 source = backend.create_repo(heads=['feat-commit-2'])
770
770
771 # create pr from a in source to A in target
771 # create pr from a in source to A in target
772 pull_request = PullRequest()
772 pull_request = PullRequest()
773 pull_request.source_repo = source
773 pull_request.source_repo = source
774
774
775 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
775 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
776 branch=backend.default_branch_name,
776 branch=backend.default_branch_name,
777 commit_id=commit_ids['master-commit-3-change-2'])
777 commit_id=commit_ids['master-commit-3-change-2'])
778
778
779 pull_request.target_repo = target
779 pull_request.target_repo = target
780 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
780 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
781 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
781 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
782
782
783 pull_request.revisions = [
783 pull_request.revisions = [
784 commit_ids['feat-commit-1'],
784 commit_ids['feat-commit-1'],
785 commit_ids['feat-commit-2']
785 commit_ids['feat-commit-2']
786 ]
786 ]
787 pull_request.title = u"Test"
787 pull_request.title = u"Test"
788 pull_request.description = u"Description"
788 pull_request.description = u"Description"
789 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
789 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
790 pull_request.pull_request_state = PullRequest.STATE_CREATED
790 pull_request.pull_request_state = PullRequest.STATE_CREATED
791 Session().add(pull_request)
791 Session().add(pull_request)
792 Session().commit()
792 Session().commit()
793 pull_request_id = pull_request.pull_request_id
793 pull_request_id = pull_request.pull_request_id
794
794
795 # PR is created, now we simulate a force-push into target,
795 # PR is created, now we simulate a force-push into target,
796 # that drops a 2 last commits
796 # that drops a 2 last commits
797 vcsrepo = target.scm_instance()
797 vcsrepo = target.scm_instance()
798 vcsrepo.config.clear_section('hooks')
798 vcsrepo.config.clear_section('hooks')
799 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
799 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
800
800
801 # update PR
801 # update PR
802 url = route_path('pullrequest_update',
802 url = route_path('pullrequest_update',
803 repo_name=target.repo_name,
803 repo_name=target.repo_name,
804 pull_request_id=pull_request_id)
804 pull_request_id=pull_request_id)
805 self.app.post(url,
805 self.app.post(url,
806 params={'update_commits': 'true', 'csrf_token': csrf_token},
806 params={'update_commits': 'true', 'csrf_token': csrf_token},
807 status=200)
807 status=200)
808
808
809 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
809 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
810 assert response.status_int == 200
810 assert response.status_int == 200
811 response.mustcontain('Pull request updated to')
811 response.mustcontain('Pull request updated to')
812 response.mustcontain('with 0 added, 0 removed commits.')
812 response.mustcontain('with 0 added, 0 removed commits.')
813
813
814 def test_update_of_ancestor_reference(self, backend, csrf_token):
814 def test_update_of_ancestor_reference(self, backend, csrf_token):
815 commits = [
815 commits = [
816 {'message': 'ancestor'},
816 {'message': 'ancestor'},
817 {'message': 'change'},
817 {'message': 'change'},
818 {'message': 'change-2'},
818 {'message': 'change-2'},
819 {'message': 'ancestor-new', 'parents': ['ancestor']},
819 {'message': 'ancestor-new', 'parents': ['ancestor']},
820 {'message': 'change-rebased'},
820 {'message': 'change-rebased'},
821 ]
821 ]
822 commit_ids = backend.create_master_repo(commits)
822 commit_ids = backend.create_master_repo(commits)
823 target = backend.create_repo(heads=['ancestor'])
823 target = backend.create_repo(heads=['ancestor'])
824 source = backend.create_repo(heads=['change'])
824 source = backend.create_repo(heads=['change'])
825
825
826 # create pr from a in source to A in target
826 # create pr from a in source to A in target
827 pull_request = PullRequest()
827 pull_request = PullRequest()
828 pull_request.source_repo = source
828 pull_request.source_repo = source
829
829
830 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
830 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
831 branch=backend.default_branch_name, commit_id=commit_ids['change'])
831 branch=backend.default_branch_name, commit_id=commit_ids['change'])
832 pull_request.target_repo = target
832 pull_request.target_repo = target
833 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
833 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
834 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
834 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
835 pull_request.revisions = [commit_ids['change']]
835 pull_request.revisions = [commit_ids['change']]
836 pull_request.title = u"Test"
836 pull_request.title = u"Test"
837 pull_request.description = u"Description"
837 pull_request.description = u"Description"
838 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
838 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
839 pull_request.pull_request_state = PullRequest.STATE_CREATED
839 pull_request.pull_request_state = PullRequest.STATE_CREATED
840 Session().add(pull_request)
840 Session().add(pull_request)
841 Session().commit()
841 Session().commit()
842 pull_request_id = pull_request.pull_request_id
842 pull_request_id = pull_request.pull_request_id
843
843
844 # target has ancestor - ancestor-new
844 # target has ancestor - ancestor-new
845 # source has ancestor - ancestor-new - change-rebased
845 # source has ancestor - ancestor-new - change-rebased
846 backend.pull_heads(target, heads=['ancestor-new'])
846 backend.pull_heads(target, heads=['ancestor-new'])
847 backend.pull_heads(source, heads=['change-rebased'])
847 backend.pull_heads(source, heads=['change-rebased'])
848
848
849 # update PR
849 # update PR
850 self.app.post(
850 self.app.post(
851 route_path('pullrequest_update',
851 route_path('pullrequest_update',
852 repo_name=target.repo_name, pull_request_id=pull_request_id),
852 repo_name=target.repo_name, pull_request_id=pull_request_id),
853 params={'update_commits': 'true', 'csrf_token': csrf_token},
853 params={'update_commits': 'true', 'csrf_token': csrf_token},
854 status=200)
854 status=200)
855
855
856 # Expect the target reference to be updated correctly
856 # Expect the target reference to be updated correctly
857 pull_request = PullRequest.get(pull_request_id)
857 pull_request = PullRequest.get(pull_request_id)
858 assert pull_request.revisions == [commit_ids['change-rebased']]
858 assert pull_request.revisions == [commit_ids['change-rebased']]
859 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
859 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
860 branch=backend.default_branch_name,
860 branch=backend.default_branch_name,
861 commit_id=commit_ids['ancestor-new'])
861 commit_id=commit_ids['ancestor-new'])
862 assert pull_request.target_ref == expected_target_ref
862 assert pull_request.target_ref == expected_target_ref
863
863
864 def test_remove_pull_request_branch(self, backend_git, csrf_token):
864 def test_remove_pull_request_branch(self, backend_git, csrf_token):
865 branch_name = 'development'
865 branch_name = 'development'
866 commits = [
866 commits = [
867 {'message': 'initial-commit'},
867 {'message': 'initial-commit'},
868 {'message': 'old-feature'},
868 {'message': 'old-feature'},
869 {'message': 'new-feature', 'branch': branch_name},
869 {'message': 'new-feature', 'branch': branch_name},
870 ]
870 ]
871 repo = backend_git.create_repo(commits)
871 repo = backend_git.create_repo(commits)
872 repo_name = repo.repo_name
872 repo_name = repo.repo_name
873 commit_ids = backend_git.commit_ids
873 commit_ids = backend_git.commit_ids
874
874
875 pull_request = PullRequest()
875 pull_request = PullRequest()
876 pull_request.source_repo = repo
876 pull_request.source_repo = repo
877 pull_request.target_repo = repo
877 pull_request.target_repo = repo
878 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
878 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
879 branch=branch_name, commit_id=commit_ids['new-feature'])
879 branch=branch_name, commit_id=commit_ids['new-feature'])
880 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
880 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
881 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
881 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
882 pull_request.revisions = [commit_ids['new-feature']]
882 pull_request.revisions = [commit_ids['new-feature']]
883 pull_request.title = u"Test"
883 pull_request.title = u"Test"
884 pull_request.description = u"Description"
884 pull_request.description = u"Description"
885 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
885 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
886 pull_request.pull_request_state = PullRequest.STATE_CREATED
886 pull_request.pull_request_state = PullRequest.STATE_CREATED
887 Session().add(pull_request)
887 Session().add(pull_request)
888 Session().commit()
888 Session().commit()
889
889
890 pull_request_id = pull_request.pull_request_id
890 pull_request_id = pull_request.pull_request_id
891
891
892 vcs = repo.scm_instance()
892 vcs = repo.scm_instance()
893 vcs.remove_ref('refs/heads/{}'.format(branch_name))
893 vcs.remove_ref('refs/heads/{}'.format(branch_name))
894 # NOTE(marcink): run GC to ensure the commits are gone
895 vcs.run_gc()
894
896
895 response = self.app.get(route_path(
897 response = self.app.get(route_path(
896 'pullrequest_show',
898 'pullrequest_show',
897 repo_name=repo_name,
899 repo_name=repo_name,
898 pull_request_id=pull_request_id))
900 pull_request_id=pull_request_id))
899
901
900 assert response.status_int == 200
902 assert response.status_int == 200
901
903
902 response.assert_response().element_contains(
904 response.assert_response().element_contains(
903 '#changeset_compare_view_content .alert strong',
905 '#changeset_compare_view_content .alert strong',
904 'Missing commits')
906 'Missing commits')
905 response.assert_response().element_contains(
907 response.assert_response().element_contains(
906 '#changeset_compare_view_content .alert',
908 '#changeset_compare_view_content .alert',
907 'This pull request cannot be displayed, because one or more'
909 'This pull request cannot be displayed, because one or more'
908 ' commits no longer exist in the source repository.')
910 ' commits no longer exist in the source repository.')
909
911
910 def test_strip_commits_from_pull_request(
912 def test_strip_commits_from_pull_request(
911 self, backend, pr_util, csrf_token):
913 self, backend, pr_util, csrf_token):
912 commits = [
914 commits = [
913 {'message': 'initial-commit'},
915 {'message': 'initial-commit'},
914 {'message': 'old-feature'},
916 {'message': 'old-feature'},
915 {'message': 'new-feature', 'parents': ['initial-commit']},
917 {'message': 'new-feature', 'parents': ['initial-commit']},
916 ]
918 ]
917 pull_request = pr_util.create_pull_request(
919 pull_request = pr_util.create_pull_request(
918 commits, target_head='initial-commit', source_head='new-feature',
920 commits, target_head='initial-commit', source_head='new-feature',
919 revisions=['new-feature'])
921 revisions=['new-feature'])
920
922
921 vcs = pr_util.source_repository.scm_instance()
923 vcs = pr_util.source_repository.scm_instance()
922 if backend.alias == 'git':
924 if backend.alias == 'git':
923 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
925 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
924 else:
926 else:
925 vcs.strip(pr_util.commit_ids['new-feature'])
927 vcs.strip(pr_util.commit_ids['new-feature'])
926
928
927 response = self.app.get(route_path(
929 response = self.app.get(route_path(
928 'pullrequest_show',
930 'pullrequest_show',
929 repo_name=pr_util.target_repository.repo_name,
931 repo_name=pr_util.target_repository.repo_name,
930 pull_request_id=pull_request.pull_request_id))
932 pull_request_id=pull_request.pull_request_id))
931
933
932 assert response.status_int == 200
934 assert response.status_int == 200
933
935
934 response.assert_response().element_contains(
936 response.assert_response().element_contains(
935 '#changeset_compare_view_content .alert strong',
937 '#changeset_compare_view_content .alert strong',
936 'Missing commits')
938 'Missing commits')
937 response.assert_response().element_contains(
939 response.assert_response().element_contains(
938 '#changeset_compare_view_content .alert',
940 '#changeset_compare_view_content .alert',
939 'This pull request cannot be displayed, because one or more'
941 'This pull request cannot be displayed, because one or more'
940 ' commits no longer exist in the source repository.')
942 ' commits no longer exist in the source repository.')
941 response.assert_response().element_contains(
943 response.assert_response().element_contains(
942 '#update_commits',
944 '#update_commits',
943 'Update commits')
945 'Update commits')
944
946
945 def test_strip_commits_and_update(
947 def test_strip_commits_and_update(
946 self, backend, pr_util, csrf_token):
948 self, backend, pr_util, csrf_token):
947 commits = [
949 commits = [
948 {'message': 'initial-commit'},
950 {'message': 'initial-commit'},
949 {'message': 'old-feature'},
951 {'message': 'old-feature'},
950 {'message': 'new-feature', 'parents': ['old-feature']},
952 {'message': 'new-feature', 'parents': ['old-feature']},
951 ]
953 ]
952 pull_request = pr_util.create_pull_request(
954 pull_request = pr_util.create_pull_request(
953 commits, target_head='old-feature', source_head='new-feature',
955 commits, target_head='old-feature', source_head='new-feature',
954 revisions=['new-feature'], mergeable=True)
956 revisions=['new-feature'], mergeable=True)
955
957
956 vcs = pr_util.source_repository.scm_instance()
958 vcs = pr_util.source_repository.scm_instance()
957 if backend.alias == 'git':
959 if backend.alias == 'git':
958 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
960 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
959 else:
961 else:
960 vcs.strip(pr_util.commit_ids['new-feature'])
962 vcs.strip(pr_util.commit_ids['new-feature'])
961
963
962 url = route_path('pullrequest_update',
964 url = route_path('pullrequest_update',
963 repo_name=pull_request.target_repo.repo_name,
965 repo_name=pull_request.target_repo.repo_name,
964 pull_request_id=pull_request.pull_request_id)
966 pull_request_id=pull_request.pull_request_id)
965 response = self.app.post(url,
967 response = self.app.post(url,
966 params={'update_commits': 'true',
968 params={'update_commits': 'true',
967 'csrf_token': csrf_token})
969 'csrf_token': csrf_token})
968
970
969 assert response.status_int == 200
971 assert response.status_int == 200
970 assert response.body == '{"response": true, "redirect_url": null}'
972 assert response.body == '{"response": true, "redirect_url": null}'
971
973
972 # Make sure that after update, it won't raise 500 errors
974 # Make sure that after update, it won't raise 500 errors
973 response = self.app.get(route_path(
975 response = self.app.get(route_path(
974 'pullrequest_show',
976 'pullrequest_show',
975 repo_name=pr_util.target_repository.repo_name,
977 repo_name=pr_util.target_repository.repo_name,
976 pull_request_id=pull_request.pull_request_id))
978 pull_request_id=pull_request.pull_request_id))
977
979
978 assert response.status_int == 200
980 assert response.status_int == 200
979 response.assert_response().element_contains(
981 response.assert_response().element_contains(
980 '#changeset_compare_view_content .alert strong',
982 '#changeset_compare_view_content .alert strong',
981 'Missing commits')
983 'Missing commits')
982
984
983 def test_branch_is_a_link(self, pr_util):
985 def test_branch_is_a_link(self, pr_util):
984 pull_request = pr_util.create_pull_request()
986 pull_request = pr_util.create_pull_request()
985 pull_request.source_ref = 'branch:origin:1234567890abcdef'
987 pull_request.source_ref = 'branch:origin:1234567890abcdef'
986 pull_request.target_ref = 'branch:target:abcdef1234567890'
988 pull_request.target_ref = 'branch:target:abcdef1234567890'
987 Session().add(pull_request)
989 Session().add(pull_request)
988 Session().commit()
990 Session().commit()
989
991
990 response = self.app.get(route_path(
992 response = self.app.get(route_path(
991 'pullrequest_show',
993 'pullrequest_show',
992 repo_name=pull_request.target_repo.scm_instance().name,
994 repo_name=pull_request.target_repo.scm_instance().name,
993 pull_request_id=pull_request.pull_request_id))
995 pull_request_id=pull_request.pull_request_id))
994 assert response.status_int == 200
996 assert response.status_int == 200
995
997
996 source = response.assert_response().get_element('.pr-source-info')
998 source = response.assert_response().get_element('.pr-source-info')
997 source_parent = source.getparent()
999 source_parent = source.getparent()
998 assert len(source_parent) == 1
1000 assert len(source_parent) == 1
999
1001
1000 target = response.assert_response().get_element('.pr-target-info')
1002 target = response.assert_response().get_element('.pr-target-info')
1001 target_parent = target.getparent()
1003 target_parent = target.getparent()
1002 assert len(target_parent) == 1
1004 assert len(target_parent) == 1
1003
1005
1004 expected_origin_link = route_path(
1006 expected_origin_link = route_path(
1005 'repo_commits',
1007 'repo_commits',
1006 repo_name=pull_request.source_repo.scm_instance().name,
1008 repo_name=pull_request.source_repo.scm_instance().name,
1007 params=dict(branch='origin'))
1009 params=dict(branch='origin'))
1008 expected_target_link = route_path(
1010 expected_target_link = route_path(
1009 'repo_commits',
1011 'repo_commits',
1010 repo_name=pull_request.target_repo.scm_instance().name,
1012 repo_name=pull_request.target_repo.scm_instance().name,
1011 params=dict(branch='target'))
1013 params=dict(branch='target'))
1012 assert source_parent.attrib['href'] == expected_origin_link
1014 assert source_parent.attrib['href'] == expected_origin_link
1013 assert target_parent.attrib['href'] == expected_target_link
1015 assert target_parent.attrib['href'] == expected_target_link
1014
1016
1015 def test_bookmark_is_not_a_link(self, pr_util):
1017 def test_bookmark_is_not_a_link(self, pr_util):
1016 pull_request = pr_util.create_pull_request()
1018 pull_request = pr_util.create_pull_request()
1017 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1019 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1018 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1020 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1019 Session().add(pull_request)
1021 Session().add(pull_request)
1020 Session().commit()
1022 Session().commit()
1021
1023
1022 response = self.app.get(route_path(
1024 response = self.app.get(route_path(
1023 'pullrequest_show',
1025 'pullrequest_show',
1024 repo_name=pull_request.target_repo.scm_instance().name,
1026 repo_name=pull_request.target_repo.scm_instance().name,
1025 pull_request_id=pull_request.pull_request_id))
1027 pull_request_id=pull_request.pull_request_id))
1026 assert response.status_int == 200
1028 assert response.status_int == 200
1027
1029
1028 source = response.assert_response().get_element('.pr-source-info')
1030 source = response.assert_response().get_element('.pr-source-info')
1029 assert source.text.strip() == 'bookmark:origin'
1031 assert source.text.strip() == 'bookmark:origin'
1030 assert source.getparent().attrib.get('href') is None
1032 assert source.getparent().attrib.get('href') is None
1031
1033
1032 target = response.assert_response().get_element('.pr-target-info')
1034 target = response.assert_response().get_element('.pr-target-info')
1033 assert target.text.strip() == 'bookmark:target'
1035 assert target.text.strip() == 'bookmark:target'
1034 assert target.getparent().attrib.get('href') is None
1036 assert target.getparent().attrib.get('href') is None
1035
1037
1036 def test_tag_is_not_a_link(self, pr_util):
1038 def test_tag_is_not_a_link(self, pr_util):
1037 pull_request = pr_util.create_pull_request()
1039 pull_request = pr_util.create_pull_request()
1038 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1040 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1039 pull_request.target_ref = 'tag:target:abcdef1234567890'
1041 pull_request.target_ref = 'tag:target:abcdef1234567890'
1040 Session().add(pull_request)
1042 Session().add(pull_request)
1041 Session().commit()
1043 Session().commit()
1042
1044
1043 response = self.app.get(route_path(
1045 response = self.app.get(route_path(
1044 'pullrequest_show',
1046 'pullrequest_show',
1045 repo_name=pull_request.target_repo.scm_instance().name,
1047 repo_name=pull_request.target_repo.scm_instance().name,
1046 pull_request_id=pull_request.pull_request_id))
1048 pull_request_id=pull_request.pull_request_id))
1047 assert response.status_int == 200
1049 assert response.status_int == 200
1048
1050
1049 source = response.assert_response().get_element('.pr-source-info')
1051 source = response.assert_response().get_element('.pr-source-info')
1050 assert source.text.strip() == 'tag:origin'
1052 assert source.text.strip() == 'tag:origin'
1051 assert source.getparent().attrib.get('href') is None
1053 assert source.getparent().attrib.get('href') is None
1052
1054
1053 target = response.assert_response().get_element('.pr-target-info')
1055 target = response.assert_response().get_element('.pr-target-info')
1054 assert target.text.strip() == 'tag:target'
1056 assert target.text.strip() == 'tag:target'
1055 assert target.getparent().attrib.get('href') is None
1057 assert target.getparent().attrib.get('href') is None
1056
1058
1057 @pytest.mark.parametrize('mergeable', [True, False])
1059 @pytest.mark.parametrize('mergeable', [True, False])
1058 def test_shadow_repository_link(
1060 def test_shadow_repository_link(
1059 self, mergeable, pr_util, http_host_only_stub):
1061 self, mergeable, pr_util, http_host_only_stub):
1060 """
1062 """
1061 Check that the pull request summary page displays a link to the shadow
1063 Check that the pull request summary page displays a link to the shadow
1062 repository if the pull request is mergeable. If it is not mergeable
1064 repository if the pull request is mergeable. If it is not mergeable
1063 the link should not be displayed.
1065 the link should not be displayed.
1064 """
1066 """
1065 pull_request = pr_util.create_pull_request(
1067 pull_request = pr_util.create_pull_request(
1066 mergeable=mergeable, enable_notifications=False)
1068 mergeable=mergeable, enable_notifications=False)
1067 target_repo = pull_request.target_repo.scm_instance()
1069 target_repo = pull_request.target_repo.scm_instance()
1068 pr_id = pull_request.pull_request_id
1070 pr_id = pull_request.pull_request_id
1069 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1071 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1070 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1072 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1071
1073
1072 response = self.app.get(route_path(
1074 response = self.app.get(route_path(
1073 'pullrequest_show',
1075 'pullrequest_show',
1074 repo_name=target_repo.name,
1076 repo_name=target_repo.name,
1075 pull_request_id=pr_id))
1077 pull_request_id=pr_id))
1076
1078
1077 if mergeable:
1079 if mergeable:
1078 response.assert_response().element_value_contains(
1080 response.assert_response().element_value_contains(
1079 'input.pr-mergeinfo', shadow_url)
1081 'input.pr-mergeinfo', shadow_url)
1080 response.assert_response().element_value_contains(
1082 response.assert_response().element_value_contains(
1081 'input.pr-mergeinfo ', 'pr-merge')
1083 'input.pr-mergeinfo ', 'pr-merge')
1082 else:
1084 else:
1083 response.assert_response().no_element_exists('.pr-mergeinfo')
1085 response.assert_response().no_element_exists('.pr-mergeinfo')
1084
1086
1085
1087
@pytest.mark.usefixtures('app')
@pytest.mark.backends("git", "hg")
class TestPullrequestsControllerDelete(object):
    """Permission and deletion behaviour of the PR delete/comment actions."""

    def test_pull_request_delete_button_permissions_admin(
            self, autologin_user, user_admin, pr_util):
        # Admins see the delete button and its confirmation text.
        pr = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        resp = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pr.target_repo.scm_instance().name,
            pull_request_id=pr.pull_request_id))

        resp.mustcontain('id="delete_pullrequest"')
        resp.mustcontain('Confirm to delete this pull request')

    def test_pull_request_delete_button_permissions_owner(
            self, autologin_regular_user, user_regular, pr_util):
        # The PR owner sees the delete button as well.
        pr = pr_util.create_pull_request(
            author=user_regular.username, enable_notifications=False)

        resp = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pr.target_repo.scm_instance().name,
            pull_request_id=pr.pull_request_id))

        resp.mustcontain('id="delete_pullrequest"')
        resp.mustcontain('Confirm to delete this pull request')

    def test_pull_request_delete_button_permissions_forbidden(
            self, autologin_regular_user, user_regular, user_admin, pr_util):
        # A regular user who does not own the PR gets no delete button at all.
        pr = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        resp = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pr.target_repo.scm_instance().name,
            pull_request_id=pr.pull_request_id))
        resp.mustcontain(no=['id="delete_pullrequest"'])
        resp.mustcontain(no=['Confirm to delete this pull request'])

    def test_pull_request_delete_button_permissions_can_update_cannot_delete(
            self, autologin_regular_user, user_regular, user_admin, pr_util,
            user_util):
        # Write access shows edit/delete buttons but not the confirm dialog.
        pr = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        user_util.grant_user_permission_to_repo(
            pr.target_repo, user_regular,
            'repository.write')

        resp = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pr.target_repo.scm_instance().name,
            pull_request_id=pr.pull_request_id))

        resp.mustcontain('id="open_edit_pullrequest"')
        resp.mustcontain('id="delete_pullrequest"')
        resp.mustcontain(no=['Confirm to delete this pull request'])

    def test_delete_comment_returns_404_if_comment_does_not_exist(
            self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
        # Deleting an unknown comment id must answer 404.
        pr = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        self.app.post(
            route_path(
                'pullrequest_comment_delete',
                repo_name=pr.target_repo.scm_instance().name,
                pull_request_id=pr.pull_request_id,
                comment_id=1024404),
            extra_environ=xhr_header,
            params={'csrf_token': csrf_token},
            status=404
        )

    def test_delete_comment(
            self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
        # Deleting an existing comment succeeds and returns 'true'.
        pr = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)
        comment_id = pr_util.create_comment().comment_id

        resp = self.app.post(
            route_path(
                'pullrequest_comment_delete',
                repo_name=pr.target_repo.scm_instance().name,
                pull_request_id=pr.pull_request_id,
                comment_id=comment_id),
            extra_environ=xhr_header,
            params={'csrf_token': csrf_token},
            status=200
        )
        assert resp.body == 'true'

    @pytest.mark.parametrize('url_type', [
        'pullrequest_new',
        'pullrequest_create',
        'pullrequest_update',
        'pullrequest_merge',
    ])
    def test_pull_request_is_forbidden_on_archived_repo(
            self, autologin_user, backend, xhr_header, user_util, url_type):
        # Archived repositories must reject all PR actions with a flash message.

        # create a temporary repo and archive it
        source = user_util.create_repo(repo_type=backend.alias)
        repo_name = source.repo_name
        repo = Repository.get_by_repo_name(repo_name)
        repo.archived = True
        Session().commit()

        resp = self.app.get(
            route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)

        assert_session_flash(
            resp, 'Action not supported for archived repository.')
1205
1207
1206
1208
def assert_pull_request_status(pull_request, expected_status):
    """Assert the calculated review status of ``pull_request`` equals ``expected_status``."""
    actual = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
    assert actual == expected_status
1210
1212
1211
1213
@pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
@pytest.mark.usefixtures("autologin_user")
def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
    # SVN repositories do not support pull requests, so these routes must 404.
    # NOTE(review): "forbidde" looks like a typo for "forbidden"; the name is
    # kept unchanged because renaming would change the collected test id.
    url = route_path(route, repo_name=backend_svn.repo_name)
    app.get(url, status=404)
@@ -1,1493 +1,1506 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2019 RhodeCode GmbH
3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33
33
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib.base import vcs_operation_context
35 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 NotAnonymous, CSRFRequired)
40 NotAnonymous, CSRFRequired)
41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 RepositoryRequirementError, EmptyRepositoryError)
44 RepositoryRequirementError, EmptyRepositoryError)
45 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 ChangesetComment, ChangesetStatus, Repository)
48 ChangesetComment, ChangesetStatus, Repository)
49 from rhodecode.model.forms import PullRequestForm
49 from rhodecode.model.forms import PullRequestForm
50 from rhodecode.model.meta import Session
50 from rhodecode.model.meta import Session
51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 from rhodecode.model.scm import ScmModel
52 from rhodecode.model.scm import ScmModel
53
53
54 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
55
55
56
56
57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58
58
    def load_default_context(self):
        """
        Build the template context shared by all pull-request views.

        Exposes the approved/rejected review-status constants and the
        default description renderer on the context object.
        """
        c = self._get_local_tmpl_context(include_app_defaults=True)
        c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
        c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
        # backward compat., we use for OLD PRs a plain renderer
        c.renderer = 'plain'
        return c
66
66
    def _get_pull_requests_list(
            self, repo_name, source, filter_type, opened_by, statuses):
        """
        Fetch one page of pull requests and shape it for the datagrid.

        :param repo_name: repository whose pull requests are listed
        :param source: when truthy, list PRs where this repo is the source
        :param filter_type: ``'awaiting_review'``, ``'awaiting_my_review'``
            or ``None`` for the unfiltered listing
        :param opened_by: optional list of user ids to filter PR authors by
        :param statuses: PR statuses to include (open/new or closed)
        :return: dict in the DataTables server-side protocol shape
            (``draw``/``data``/``recordsTotal``/``recordsFiltered``)
        """
        # chunking/ordering parameters come from the DataTables request
        draw, start, limit = self._extract_chunk(self.request)
        search_q, order_by, order_dir = self._extract_ordering(self.request)
        _render = self.request.get_partial_renderer(
            'rhodecode:templates/data_table/_dt_elements.mako')

        # pagination

        # pick the query pair (page + total count) matching the filter type
        if filter_type == 'awaiting_review':
            pull_requests = PullRequestModel().get_awaiting_review(
                repo_name, search_q=search_q, source=source, opened_by=opened_by,
                statuses=statuses, offset=start, length=limit,
                order_by=order_by, order_dir=order_dir)
            pull_requests_total_count = PullRequestModel().count_awaiting_review(
                repo_name, search_q=search_q, source=source, statuses=statuses,
                opened_by=opened_by)
        elif filter_type == 'awaiting_my_review':
            pull_requests = PullRequestModel().get_awaiting_my_review(
                repo_name, search_q=search_q, source=source, opened_by=opened_by,
                user_id=self._rhodecode_user.user_id, statuses=statuses,
                offset=start, length=limit, order_by=order_by,
                order_dir=order_dir)
            pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
                repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
                statuses=statuses, opened_by=opened_by)
        else:
            pull_requests = PullRequestModel().get_all(
                repo_name, search_q=search_q, source=source, opened_by=opened_by,
                statuses=statuses, offset=start, length=limit,
                order_by=order_by, order_dir=order_dir)
            pull_requests_total_count = PullRequestModel().count_all(
                repo_name, search_q=search_q, source=source, statuses=statuses,
                opened_by=opened_by)

        # render each PR row; *_raw keys carry sortable values for the grid
        data = []
        comments_model = CommentsModel()
        for pr in pull_requests:
            comments = comments_model.get_all_comments(
                self.db_repo.repo_id, pull_request=pr)

            data.append({
                'name': _render('pullrequest_name',
                                pr.pull_request_id, pr.pull_request_state,
                                pr.work_in_progress, pr.target_repo.repo_name),
                'name_raw': pr.pull_request_id,
                'status': _render('pullrequest_status',
                                  pr.calculated_review_status()),
                'title': _render('pullrequest_title', pr.title, pr.description),
                'description': h.escape(pr.description),
                'updated_on': _render('pullrequest_updated_on',
                                      h.datetime_to_time(pr.updated_on)),
                'updated_on_raw': h.datetime_to_time(pr.updated_on),
                'created_on': _render('pullrequest_updated_on',
                                      h.datetime_to_time(pr.created_on)),
                'created_on_raw': h.datetime_to_time(pr.created_on),
                'state': pr.pull_request_state,
                'author': _render('pullrequest_author',
                                  pr.author.full_contact, ),
                'author_raw': pr.author.full_name,
                'comments': _render('pullrequest_comments', len(comments)),
                'comments_raw': len(comments),
                'closed': pr.is_closed(),
            })

        data = ({
            'draw': draw,
            'data': data,
            'recordsTotal': pull_requests_total_count,
            'recordsFiltered': pull_requests_total_count,
        })
        return data
140
140
141 @LoginRequired()
141 @LoginRequired()
142 @HasRepoPermissionAnyDecorator(
142 @HasRepoPermissionAnyDecorator(
143 'repository.read', 'repository.write', 'repository.admin')
143 'repository.read', 'repository.write', 'repository.admin')
144 @view_config(
144 @view_config(
145 route_name='pullrequest_show_all', request_method='GET',
145 route_name='pullrequest_show_all', request_method='GET',
146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
147 def pull_request_list(self):
147 def pull_request_list(self):
148 c = self.load_default_context()
148 c = self.load_default_context()
149
149
150 req_get = self.request.GET
150 req_get = self.request.GET
151 c.source = str2bool(req_get.get('source'))
151 c.source = str2bool(req_get.get('source'))
152 c.closed = str2bool(req_get.get('closed'))
152 c.closed = str2bool(req_get.get('closed'))
153 c.my = str2bool(req_get.get('my'))
153 c.my = str2bool(req_get.get('my'))
154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
156
156
157 c.active = 'open'
157 c.active = 'open'
158 if c.my:
158 if c.my:
159 c.active = 'my'
159 c.active = 'my'
160 if c.closed:
160 if c.closed:
161 c.active = 'closed'
161 c.active = 'closed'
162 if c.awaiting_review and not c.source:
162 if c.awaiting_review and not c.source:
163 c.active = 'awaiting'
163 c.active = 'awaiting'
164 if c.source and not c.awaiting_review:
164 if c.source and not c.awaiting_review:
165 c.active = 'source'
165 c.active = 'source'
166 if c.awaiting_my_review:
166 if c.awaiting_my_review:
167 c.active = 'awaiting_my'
167 c.active = 'awaiting_my'
168
168
169 return self._get_template_context(c)
169 return self._get_template_context(c)
170
170
171 @LoginRequired()
171 @LoginRequired()
172 @HasRepoPermissionAnyDecorator(
172 @HasRepoPermissionAnyDecorator(
173 'repository.read', 'repository.write', 'repository.admin')
173 'repository.read', 'repository.write', 'repository.admin')
174 @view_config(
174 @view_config(
175 route_name='pullrequest_show_all_data', request_method='GET',
175 route_name='pullrequest_show_all_data', request_method='GET',
176 renderer='json_ext', xhr=True)
176 renderer='json_ext', xhr=True)
177 def pull_request_list_data(self):
177 def pull_request_list_data(self):
178 self.load_default_context()
178 self.load_default_context()
179
179
180 # additional filters
180 # additional filters
181 req_get = self.request.GET
181 req_get = self.request.GET
182 source = str2bool(req_get.get('source'))
182 source = str2bool(req_get.get('source'))
183 closed = str2bool(req_get.get('closed'))
183 closed = str2bool(req_get.get('closed'))
184 my = str2bool(req_get.get('my'))
184 my = str2bool(req_get.get('my'))
185 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187
187
188 filter_type = 'awaiting_review' if awaiting_review \
188 filter_type = 'awaiting_review' if awaiting_review \
189 else 'awaiting_my_review' if awaiting_my_review \
189 else 'awaiting_my_review' if awaiting_my_review \
190 else None
190 else None
191
191
192 opened_by = None
192 opened_by = None
193 if my:
193 if my:
194 opened_by = [self._rhodecode_user.user_id]
194 opened_by = [self._rhodecode_user.user_id]
195
195
196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 if closed:
197 if closed:
198 statuses = [PullRequest.STATUS_CLOSED]
198 statuses = [PullRequest.STATUS_CLOSED]
199
199
200 data = self._get_pull_requests_list(
200 data = self._get_pull_requests_list(
201 repo_name=self.db_repo_name, source=source,
201 repo_name=self.db_repo_name, source=source,
202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203
203
204 return data
204 return data
205
205
206 def _is_diff_cache_enabled(self, target_repo):
206 def _is_diff_cache_enabled(self, target_repo):
207 caching_enabled = self._get_general_setting(
207 caching_enabled = self._get_general_setting(
208 target_repo, 'rhodecode_diff_cache')
208 target_repo, 'rhodecode_diff_cache')
209 log.debug('Diff caching enabled: %s', caching_enabled)
209 log.debug('Diff caching enabled: %s', caching_enabled)
210 return caching_enabled
210 return caching_enabled
211
211
212 def _get_diffset(self, source_repo_name, source_repo,
212 def _get_diffset(self, source_repo_name, source_repo,
213 source_ref_id, target_ref_id,
213 source_ref_id, target_ref_id,
214 target_commit, source_commit, diff_limit, file_limit,
214 target_commit, source_commit, diff_limit, file_limit,
215 fulldiff, hide_whitespace_changes, diff_context):
215 fulldiff, hide_whitespace_changes, diff_context):
216
216
217 vcs_diff = PullRequestModel().get_diff(
217 vcs_diff = PullRequestModel().get_diff(
218 source_repo, source_ref_id, target_ref_id,
218 source_repo, source_ref_id, target_ref_id,
219 hide_whitespace_changes, diff_context)
219 hide_whitespace_changes, diff_context)
220
220
221 diff_processor = diffs.DiffProcessor(
221 diff_processor = diffs.DiffProcessor(
222 vcs_diff, format='newdiff', diff_limit=diff_limit,
222 vcs_diff, format='newdiff', diff_limit=diff_limit,
223 file_limit=file_limit, show_full_diff=fulldiff)
223 file_limit=file_limit, show_full_diff=fulldiff)
224
224
225 _parsed = diff_processor.prepare()
225 _parsed = diff_processor.prepare()
226
226
227 diffset = codeblocks.DiffSet(
227 diffset = codeblocks.DiffSet(
228 repo_name=self.db_repo_name,
228 repo_name=self.db_repo_name,
229 source_repo_name=source_repo_name,
229 source_repo_name=source_repo_name,
230 source_node_getter=codeblocks.diffset_node_getter(target_commit),
230 source_node_getter=codeblocks.diffset_node_getter(target_commit),
231 target_node_getter=codeblocks.diffset_node_getter(source_commit),
231 target_node_getter=codeblocks.diffset_node_getter(source_commit),
232 )
232 )
233 diffset = self.path_filter.render_patchset_filtered(
233 diffset = self.path_filter.render_patchset_filtered(
234 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
234 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
235
235
236 return diffset
236 return diffset
237
237
238 def _get_range_diffset(self, source_scm, source_repo,
238 def _get_range_diffset(self, source_scm, source_repo,
239 commit1, commit2, diff_limit, file_limit,
239 commit1, commit2, diff_limit, file_limit,
240 fulldiff, hide_whitespace_changes, diff_context):
240 fulldiff, hide_whitespace_changes, diff_context):
241 vcs_diff = source_scm.get_diff(
241 vcs_diff = source_scm.get_diff(
242 commit1, commit2,
242 commit1, commit2,
243 ignore_whitespace=hide_whitespace_changes,
243 ignore_whitespace=hide_whitespace_changes,
244 context=diff_context)
244 context=diff_context)
245
245
246 diff_processor = diffs.DiffProcessor(
246 diff_processor = diffs.DiffProcessor(
247 vcs_diff, format='newdiff', diff_limit=diff_limit,
247 vcs_diff, format='newdiff', diff_limit=diff_limit,
248 file_limit=file_limit, show_full_diff=fulldiff)
248 file_limit=file_limit, show_full_diff=fulldiff)
249
249
250 _parsed = diff_processor.prepare()
250 _parsed = diff_processor.prepare()
251
251
252 diffset = codeblocks.DiffSet(
252 diffset = codeblocks.DiffSet(
253 repo_name=source_repo.repo_name,
253 repo_name=source_repo.repo_name,
254 source_node_getter=codeblocks.diffset_node_getter(commit1),
254 source_node_getter=codeblocks.diffset_node_getter(commit1),
255 target_node_getter=codeblocks.diffset_node_getter(commit2))
255 target_node_getter=codeblocks.diffset_node_getter(commit2))
256
256
257 diffset = self.path_filter.render_patchset_filtered(
257 diffset = self.path_filter.render_patchset_filtered(
258 diffset, _parsed, commit1.raw_id, commit2.raw_id)
258 diffset, _parsed, commit1.raw_id, commit2.raw_id)
259
259
260 return diffset
260 return diffset
261
261
262 @LoginRequired()
262 @LoginRequired()
263 @HasRepoPermissionAnyDecorator(
263 @HasRepoPermissionAnyDecorator(
264 'repository.read', 'repository.write', 'repository.admin')
264 'repository.read', 'repository.write', 'repository.admin')
265 @view_config(
265 @view_config(
266 route_name='pullrequest_show', request_method='GET',
266 route_name='pullrequest_show', request_method='GET',
267 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
267 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
268 def pull_request_show(self):
268 def pull_request_show(self):
269 _ = self.request.translate
269 _ = self.request.translate
270 c = self.load_default_context()
270 c = self.load_default_context()
271
271
272 pull_request = PullRequest.get_or_404(
272 pull_request = PullRequest.get_or_404(
273 self.request.matchdict['pull_request_id'])
273 self.request.matchdict['pull_request_id'])
274 pull_request_id = pull_request.pull_request_id
274 pull_request_id = pull_request.pull_request_id
275
275
276 c.state_progressing = pull_request.is_state_changing()
276 c.state_progressing = pull_request.is_state_changing()
277
277
278 _new_state = {
278 _new_state = {
279 'created': PullRequest.STATE_CREATED,
279 'created': PullRequest.STATE_CREATED,
280 }.get(self.request.GET.get('force_state'))
280 }.get(self.request.GET.get('force_state'))
281 if c.is_super_admin and _new_state:
281 if c.is_super_admin and _new_state:
282 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
282 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
283 h.flash(
283 h.flash(
284 _('Pull Request state was force changed to `{}`').format(_new_state),
284 _('Pull Request state was force changed to `{}`').format(_new_state),
285 category='success')
285 category='success')
286 Session().commit()
286 Session().commit()
287
287
288 raise HTTPFound(h.route_path(
288 raise HTTPFound(h.route_path(
289 'pullrequest_show', repo_name=self.db_repo_name,
289 'pullrequest_show', repo_name=self.db_repo_name,
290 pull_request_id=pull_request_id))
290 pull_request_id=pull_request_id))
291
291
292 version = self.request.GET.get('version')
292 version = self.request.GET.get('version')
293 from_version = self.request.GET.get('from_version') or version
293 from_version = self.request.GET.get('from_version') or version
294 merge_checks = self.request.GET.get('merge_checks')
294 merge_checks = self.request.GET.get('merge_checks')
295 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
295 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
296
296
297 # fetch global flags of ignore ws or context lines
297 # fetch global flags of ignore ws or context lines
298 diff_context = diffs.get_diff_context(self.request)
298 diff_context = diffs.get_diff_context(self.request)
299 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
299 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
300
300
301 force_refresh = str2bool(self.request.GET.get('force_refresh'))
301 force_refresh = str2bool(self.request.GET.get('force_refresh'))
302
302
303 (pull_request_latest,
303 (pull_request_latest,
304 pull_request_at_ver,
304 pull_request_at_ver,
305 pull_request_display_obj,
305 pull_request_display_obj,
306 at_version) = PullRequestModel().get_pr_version(
306 at_version) = PullRequestModel().get_pr_version(
307 pull_request_id, version=version)
307 pull_request_id, version=version)
308 pr_closed = pull_request_latest.is_closed()
308 pr_closed = pull_request_latest.is_closed()
309
309
310 if pr_closed and (version or from_version):
310 if pr_closed and (version or from_version):
311 # not allow to browse versions
311 # not allow to browse versions
312 raise HTTPFound(h.route_path(
312 raise HTTPFound(h.route_path(
313 'pullrequest_show', repo_name=self.db_repo_name,
313 'pullrequest_show', repo_name=self.db_repo_name,
314 pull_request_id=pull_request_id))
314 pull_request_id=pull_request_id))
315
315
316 versions = pull_request_display_obj.versions()
316 versions = pull_request_display_obj.versions()
317 # used to store per-commit range diffs
317 # used to store per-commit range diffs
318 c.changes = collections.OrderedDict()
318 c.changes = collections.OrderedDict()
319 c.range_diff_on = self.request.GET.get('range-diff') == "1"
319 c.range_diff_on = self.request.GET.get('range-diff') == "1"
320
320
321 c.at_version = at_version
321 c.at_version = at_version
322 c.at_version_num = (at_version
322 c.at_version_num = (at_version
323 if at_version and at_version != 'latest'
323 if at_version and at_version != 'latest'
324 else None)
324 else None)
325 c.at_version_pos = ChangesetComment.get_index_from_version(
325 c.at_version_pos = ChangesetComment.get_index_from_version(
326 c.at_version_num, versions)
326 c.at_version_num, versions)
327
327
328 (prev_pull_request_latest,
328 (prev_pull_request_latest,
329 prev_pull_request_at_ver,
329 prev_pull_request_at_ver,
330 prev_pull_request_display_obj,
330 prev_pull_request_display_obj,
331 prev_at_version) = PullRequestModel().get_pr_version(
331 prev_at_version) = PullRequestModel().get_pr_version(
332 pull_request_id, version=from_version)
332 pull_request_id, version=from_version)
333
333
334 c.from_version = prev_at_version
334 c.from_version = prev_at_version
335 c.from_version_num = (prev_at_version
335 c.from_version_num = (prev_at_version
336 if prev_at_version and prev_at_version != 'latest'
336 if prev_at_version and prev_at_version != 'latest'
337 else None)
337 else None)
338 c.from_version_pos = ChangesetComment.get_index_from_version(
338 c.from_version_pos = ChangesetComment.get_index_from_version(
339 c.from_version_num, versions)
339 c.from_version_num, versions)
340
340
341 # define if we're in COMPARE mode or VIEW at version mode
341 # define if we're in COMPARE mode or VIEW at version mode
342 compare = at_version != prev_at_version
342 compare = at_version != prev_at_version
343
343
344 # pull_requests repo_name we opened it against
344 # pull_requests repo_name we opened it against
345 # ie. target_repo must match
345 # ie. target_repo must match
346 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
346 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
347 raise HTTPNotFound()
347 raise HTTPNotFound()
348
348
349 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
349 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
350 pull_request_at_ver)
350 pull_request_at_ver)
351
351
352 c.pull_request = pull_request_display_obj
352 c.pull_request = pull_request_display_obj
353 c.renderer = pull_request_at_ver.description_renderer or c.renderer
353 c.renderer = pull_request_at_ver.description_renderer or c.renderer
354 c.pull_request_latest = pull_request_latest
354 c.pull_request_latest = pull_request_latest
355
355
356 if compare or (at_version and not at_version == 'latest'):
356 if compare or (at_version and not at_version == 'latest'):
357 c.allowed_to_change_status = False
357 c.allowed_to_change_status = False
358 c.allowed_to_update = False
358 c.allowed_to_update = False
359 c.allowed_to_merge = False
359 c.allowed_to_merge = False
360 c.allowed_to_delete = False
360 c.allowed_to_delete = False
361 c.allowed_to_comment = False
361 c.allowed_to_comment = False
362 c.allowed_to_close = False
362 c.allowed_to_close = False
363 else:
363 else:
364 can_change_status = PullRequestModel().check_user_change_status(
364 can_change_status = PullRequestModel().check_user_change_status(
365 pull_request_at_ver, self._rhodecode_user)
365 pull_request_at_ver, self._rhodecode_user)
366 c.allowed_to_change_status = can_change_status and not pr_closed
366 c.allowed_to_change_status = can_change_status and not pr_closed
367
367
368 c.allowed_to_update = PullRequestModel().check_user_update(
368 c.allowed_to_update = PullRequestModel().check_user_update(
369 pull_request_latest, self._rhodecode_user) and not pr_closed
369 pull_request_latest, self._rhodecode_user) and not pr_closed
370 c.allowed_to_merge = PullRequestModel().check_user_merge(
370 c.allowed_to_merge = PullRequestModel().check_user_merge(
371 pull_request_latest, self._rhodecode_user) and not pr_closed
371 pull_request_latest, self._rhodecode_user) and not pr_closed
372 c.allowed_to_delete = PullRequestModel().check_user_delete(
372 c.allowed_to_delete = PullRequestModel().check_user_delete(
373 pull_request_latest, self._rhodecode_user) and not pr_closed
373 pull_request_latest, self._rhodecode_user) and not pr_closed
374 c.allowed_to_comment = not pr_closed
374 c.allowed_to_comment = not pr_closed
375 c.allowed_to_close = c.allowed_to_merge and not pr_closed
375 c.allowed_to_close = c.allowed_to_merge and not pr_closed
376
376
377 c.forbid_adding_reviewers = False
377 c.forbid_adding_reviewers = False
378 c.forbid_author_to_review = False
378 c.forbid_author_to_review = False
379 c.forbid_commit_author_to_review = False
379 c.forbid_commit_author_to_review = False
380
380
381 if pull_request_latest.reviewer_data and \
381 if pull_request_latest.reviewer_data and \
382 'rules' in pull_request_latest.reviewer_data:
382 'rules' in pull_request_latest.reviewer_data:
383 rules = pull_request_latest.reviewer_data['rules'] or {}
383 rules = pull_request_latest.reviewer_data['rules'] or {}
384 try:
384 try:
385 c.forbid_adding_reviewers = rules.get(
385 c.forbid_adding_reviewers = rules.get(
386 'forbid_adding_reviewers')
386 'forbid_adding_reviewers')
387 c.forbid_author_to_review = rules.get(
387 c.forbid_author_to_review = rules.get(
388 'forbid_author_to_review')
388 'forbid_author_to_review')
389 c.forbid_commit_author_to_review = rules.get(
389 c.forbid_commit_author_to_review = rules.get(
390 'forbid_commit_author_to_review')
390 'forbid_commit_author_to_review')
391 except Exception:
391 except Exception:
392 pass
392 pass
393
393
394 # check merge capabilities
394 # check merge capabilities
395 _merge_check = MergeCheck.validate(
395 _merge_check = MergeCheck.validate(
396 pull_request_latest, auth_user=self._rhodecode_user,
396 pull_request_latest, auth_user=self._rhodecode_user,
397 translator=self.request.translate,
397 translator=self.request.translate,
398 force_shadow_repo_refresh=force_refresh)
398 force_shadow_repo_refresh=force_refresh)
399
399 c.pr_merge_errors = _merge_check.error_details
400 c.pr_merge_errors = _merge_check.error_details
400 c.pr_merge_possible = not _merge_check.failed
401 c.pr_merge_possible = not _merge_check.failed
401 c.pr_merge_message = _merge_check.merge_msg
402 c.pr_merge_message = _merge_check.merge_msg
402
403
403 c.pr_merge_info = MergeCheck.get_merge_conditions(
404 c.pr_merge_info = MergeCheck.get_merge_conditions(
404 pull_request_latest, translator=self.request.translate)
405 pull_request_latest, translator=self.request.translate)
405
406
406 c.pull_request_review_status = _merge_check.review_status
407 c.pull_request_review_status = _merge_check.review_status
407 if merge_checks:
408 if merge_checks:
408 self.request.override_renderer = \
409 self.request.override_renderer = \
409 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
410 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
410 return self._get_template_context(c)
411 return self._get_template_context(c)
411
412
412 comments_model = CommentsModel()
413 comments_model = CommentsModel()
413
414
414 # reviewers and statuses
415 # reviewers and statuses
415 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
416 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
416 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
417 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
417
418
418 # GENERAL COMMENTS with versions #
419 # GENERAL COMMENTS with versions #
419 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
420 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
420 q = q.order_by(ChangesetComment.comment_id.asc())
421 q = q.order_by(ChangesetComment.comment_id.asc())
421 general_comments = q
422 general_comments = q
422
423
423 # pick comments we want to render at current version
424 # pick comments we want to render at current version
424 c.comment_versions = comments_model.aggregate_comments(
425 c.comment_versions = comments_model.aggregate_comments(
425 general_comments, versions, c.at_version_num)
426 general_comments, versions, c.at_version_num)
426 c.comments = c.comment_versions[c.at_version_num]['until']
427 c.comments = c.comment_versions[c.at_version_num]['until']
427
428
428 # INLINE COMMENTS with versions #
429 # INLINE COMMENTS with versions #
429 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
430 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
430 q = q.order_by(ChangesetComment.comment_id.asc())
431 q = q.order_by(ChangesetComment.comment_id.asc())
431 inline_comments = q
432 inline_comments = q
432
433
433 c.inline_versions = comments_model.aggregate_comments(
434 c.inline_versions = comments_model.aggregate_comments(
434 inline_comments, versions, c.at_version_num, inline=True)
435 inline_comments, versions, c.at_version_num, inline=True)
435
436
436 # TODOs
437 # TODOs
437 c.unresolved_comments = CommentsModel() \
438 c.unresolved_comments = CommentsModel() \
438 .get_pull_request_unresolved_todos(pull_request)
439 .get_pull_request_unresolved_todos(pull_request)
439 c.resolved_comments = CommentsModel() \
440 c.resolved_comments = CommentsModel() \
440 .get_pull_request_resolved_todos(pull_request)
441 .get_pull_request_resolved_todos(pull_request)
441
442
442 # inject latest version
443 # inject latest version
443 latest_ver = PullRequest.get_pr_display_object(
444 latest_ver = PullRequest.get_pr_display_object(
444 pull_request_latest, pull_request_latest)
445 pull_request_latest, pull_request_latest)
445
446
446 c.versions = versions + [latest_ver]
447 c.versions = versions + [latest_ver]
447
448
448 # if we use version, then do not show later comments
449 # if we use version, then do not show later comments
449 # than current version
450 # than current version
450 display_inline_comments = collections.defaultdict(
451 display_inline_comments = collections.defaultdict(
451 lambda: collections.defaultdict(list))
452 lambda: collections.defaultdict(list))
452 for co in inline_comments:
453 for co in inline_comments:
453 if c.at_version_num:
454 if c.at_version_num:
454 # pick comments that are at least UPTO given version, so we
455 # pick comments that are at least UPTO given version, so we
455 # don't render comments for higher version
456 # don't render comments for higher version
456 should_render = co.pull_request_version_id and \
457 should_render = co.pull_request_version_id and \
457 co.pull_request_version_id <= c.at_version_num
458 co.pull_request_version_id <= c.at_version_num
458 else:
459 else:
459 # showing all, for 'latest'
460 # showing all, for 'latest'
460 should_render = True
461 should_render = True
461
462
462 if should_render:
463 if should_render:
463 display_inline_comments[co.f_path][co.line_no].append(co)
464 display_inline_comments[co.f_path][co.line_no].append(co)
464
465
465 # load diff data into template context, if we use compare mode then
466 # load diff data into template context, if we use compare mode then
466 # diff is calculated based on changes between versions of PR
467 # diff is calculated based on changes between versions of PR
467
468
468 source_repo = pull_request_at_ver.source_repo
469 source_repo = pull_request_at_ver.source_repo
469 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
470 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
470
471
471 target_repo = pull_request_at_ver.target_repo
472 target_repo = pull_request_at_ver.target_repo
472 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
473 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
473
474
474 if compare:
475 if compare:
475 # in compare switch the diff base to latest commit from prev version
476 # in compare switch the diff base to latest commit from prev version
476 target_ref_id = prev_pull_request_display_obj.revisions[0]
477 target_ref_id = prev_pull_request_display_obj.revisions[0]
477
478
478 # despite opening commits for bookmarks/branches/tags, we always
479 # despite opening commits for bookmarks/branches/tags, we always
479 # convert this to rev to prevent changes after bookmark or branch change
480 # convert this to rev to prevent changes after bookmark or branch change
480 c.source_ref_type = 'rev'
481 c.source_ref_type = 'rev'
481 c.source_ref = source_ref_id
482 c.source_ref = source_ref_id
482
483
483 c.target_ref_type = 'rev'
484 c.target_ref_type = 'rev'
484 c.target_ref = target_ref_id
485 c.target_ref = target_ref_id
485
486
486 c.source_repo = source_repo
487 c.source_repo = source_repo
487 c.target_repo = target_repo
488 c.target_repo = target_repo
488
489
489 c.commit_ranges = []
490 c.commit_ranges = []
490 source_commit = EmptyCommit()
491 source_commit = EmptyCommit()
491 target_commit = EmptyCommit()
492 target_commit = EmptyCommit()
492 c.missing_requirements = False
493 c.missing_requirements = False
493
494
494 source_scm = source_repo.scm_instance()
495 source_scm = source_repo.scm_instance()
495 target_scm = target_repo.scm_instance()
496 target_scm = target_repo.scm_instance()
496
497
497 shadow_scm = None
498 shadow_scm = None
498 try:
499 try:
499 shadow_scm = pull_request_latest.get_shadow_repo()
500 shadow_scm = pull_request_latest.get_shadow_repo()
500 except Exception:
501 except Exception:
501 log.debug('Failed to get shadow repo', exc_info=True)
502 log.debug('Failed to get shadow repo', exc_info=True)
502 # try first the existing source_repo, and then shadow
503 # try first the existing source_repo, and then shadow
503 # repo if we can obtain one
504 # repo if we can obtain one
504 commits_source_repo = source_scm
505 commits_source_repo = source_scm
505 if shadow_scm:
506 if shadow_scm:
506 commits_source_repo = shadow_scm
507 commits_source_repo = shadow_scm
507
508
508 c.commits_source_repo = commits_source_repo
509 c.commits_source_repo = commits_source_repo
509 c.ancestor = None # set it to None, to hide it from PR view
510 c.ancestor = None # set it to None, to hide it from PR view
510
511
511 # empty version means latest, so we keep this to prevent
512 # empty version means latest, so we keep this to prevent
512 # double caching
513 # double caching
513 version_normalized = version or 'latest'
514 version_normalized = version or 'latest'
514 from_version_normalized = from_version or 'latest'
515 from_version_normalized = from_version or 'latest'
515
516
516 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
517 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
517 cache_file_path = diff_cache_exist(
518 cache_file_path = diff_cache_exist(
518 cache_path, 'pull_request', pull_request_id, version_normalized,
519 cache_path, 'pull_request', pull_request_id, version_normalized,
519 from_version_normalized, source_ref_id, target_ref_id,
520 from_version_normalized, source_ref_id, target_ref_id,
520 hide_whitespace_changes, diff_context, c.fulldiff)
521 hide_whitespace_changes, diff_context, c.fulldiff)
521
522
522 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
523 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
523 force_recache = self.get_recache_flag()
524 force_recache = self.get_recache_flag()
524
525
525 cached_diff = None
526 cached_diff = None
526 if caching_enabled:
527 if caching_enabled:
527 cached_diff = load_cached_diff(cache_file_path)
528 cached_diff = load_cached_diff(cache_file_path)
528
529
529 has_proper_commit_cache = (
530 has_proper_commit_cache = (
530 cached_diff and cached_diff.get('commits')
531 cached_diff and cached_diff.get('commits')
531 and len(cached_diff.get('commits', [])) == 5
532 and len(cached_diff.get('commits', [])) == 5
532 and cached_diff.get('commits')[0]
533 and cached_diff.get('commits')[0]
533 and cached_diff.get('commits')[3])
534 and cached_diff.get('commits')[3])
534
535
535 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
536 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
536 diff_commit_cache = \
537 diff_commit_cache = \
537 (ancestor_commit, commit_cache, missing_requirements,
538 (ancestor_commit, commit_cache, missing_requirements,
538 source_commit, target_commit) = cached_diff['commits']
539 source_commit, target_commit) = cached_diff['commits']
539 else:
540 else:
541 # NOTE(marcink): we reach potentially unreachable errors when a PR has
542 # merge errors resulting in potentially hidden commits in the shadow repo.
543 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
544 and _merge_check.merge_response
545 maybe_unreachable = maybe_unreachable \
546 and _merge_check.merge_response.metadata.get('unresolved_files')
547 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
540 diff_commit_cache = \
548 diff_commit_cache = \
541 (ancestor_commit, commit_cache, missing_requirements,
549 (ancestor_commit, commit_cache, missing_requirements,
542 source_commit, target_commit) = self.get_commits(
550 source_commit, target_commit) = self.get_commits(
543 commits_source_repo,
551 commits_source_repo,
544 pull_request_at_ver,
552 pull_request_at_ver,
545 source_commit,
553 source_commit,
546 source_ref_id,
554 source_ref_id,
547 source_scm,
555 source_scm,
548 target_commit,
556 target_commit,
549 target_ref_id,
557 target_ref_id,
550 target_scm)
558 target_scm, maybe_unreachable=maybe_unreachable)
551
559
552 # register our commit range
560 # register our commit range
553 for comm in commit_cache.values():
561 for comm in commit_cache.values():
554 c.commit_ranges.append(comm)
562 c.commit_ranges.append(comm)
555
563
556 c.missing_requirements = missing_requirements
564 c.missing_requirements = missing_requirements
557 c.ancestor_commit = ancestor_commit
565 c.ancestor_commit = ancestor_commit
558 c.statuses = source_repo.statuses(
566 c.statuses = source_repo.statuses(
559 [x.raw_id for x in c.commit_ranges])
567 [x.raw_id for x in c.commit_ranges])
560
568
561 # auto collapse if we have more than limit
569 # auto collapse if we have more than limit
562 collapse_limit = diffs.DiffProcessor._collapse_commits_over
570 collapse_limit = diffs.DiffProcessor._collapse_commits_over
563 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
571 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
564 c.compare_mode = compare
572 c.compare_mode = compare
565
573
566 # diff_limit is the old behavior, will cut off the whole diff
574 # diff_limit is the old behavior, will cut off the whole diff
567 # if the limit is applied otherwise will just hide the
575 # if the limit is applied otherwise will just hide the
568 # big files from the front-end
576 # big files from the front-end
569 diff_limit = c.visual.cut_off_limit_diff
577 diff_limit = c.visual.cut_off_limit_diff
570 file_limit = c.visual.cut_off_limit_file
578 file_limit = c.visual.cut_off_limit_file
571
579
572 c.missing_commits = False
580 c.missing_commits = False
573 if (c.missing_requirements
581 if (c.missing_requirements
574 or isinstance(source_commit, EmptyCommit)
582 or isinstance(source_commit, EmptyCommit)
575 or source_commit == target_commit):
583 or source_commit == target_commit):
576
584
577 c.missing_commits = True
585 c.missing_commits = True
578 else:
586 else:
579 c.inline_comments = display_inline_comments
587 c.inline_comments = display_inline_comments
580
588
581 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
589 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
582 if not force_recache and has_proper_diff_cache:
590 if not force_recache and has_proper_diff_cache:
583 c.diffset = cached_diff['diff']
591 c.diffset = cached_diff['diff']
584 (ancestor_commit, commit_cache, missing_requirements,
592 (ancestor_commit, commit_cache, missing_requirements,
585 source_commit, target_commit) = cached_diff['commits']
593 source_commit, target_commit) = cached_diff['commits']
586 else:
594 else:
587 c.diffset = self._get_diffset(
595 c.diffset = self._get_diffset(
588 c.source_repo.repo_name, commits_source_repo,
596 c.source_repo.repo_name, commits_source_repo,
589 source_ref_id, target_ref_id,
597 source_ref_id, target_ref_id,
590 target_commit, source_commit,
598 target_commit, source_commit,
591 diff_limit, file_limit, c.fulldiff,
599 diff_limit, file_limit, c.fulldiff,
592 hide_whitespace_changes, diff_context)
600 hide_whitespace_changes, diff_context)
593
601
594 # save cached diff
602 # save cached diff
595 if caching_enabled:
603 if caching_enabled:
596 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
604 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
597
605
598 c.limited_diff = c.diffset.limited_diff
606 c.limited_diff = c.diffset.limited_diff
599
607
600 # calculate removed files that are bound to comments
608 # calculate removed files that are bound to comments
601 comment_deleted_files = [
609 comment_deleted_files = [
602 fname for fname in display_inline_comments
610 fname for fname in display_inline_comments
603 if fname not in c.diffset.file_stats]
611 if fname not in c.diffset.file_stats]
604
612
605 c.deleted_files_comments = collections.defaultdict(dict)
613 c.deleted_files_comments = collections.defaultdict(dict)
606 for fname, per_line_comments in display_inline_comments.items():
614 for fname, per_line_comments in display_inline_comments.items():
607 if fname in comment_deleted_files:
615 if fname in comment_deleted_files:
608 c.deleted_files_comments[fname]['stats'] = 0
616 c.deleted_files_comments[fname]['stats'] = 0
609 c.deleted_files_comments[fname]['comments'] = list()
617 c.deleted_files_comments[fname]['comments'] = list()
610 for lno, comments in per_line_comments.items():
618 for lno, comments in per_line_comments.items():
611 c.deleted_files_comments[fname]['comments'].extend(comments)
619 c.deleted_files_comments[fname]['comments'].extend(comments)
612
620
613 # maybe calculate the range diff
621 # maybe calculate the range diff
614 if c.range_diff_on:
622 if c.range_diff_on:
615 # TODO(marcink): set whitespace/context
623 # TODO(marcink): set whitespace/context
616 context_lcl = 3
624 context_lcl = 3
617 ign_whitespace_lcl = False
625 ign_whitespace_lcl = False
618
626
619 for commit in c.commit_ranges:
627 for commit in c.commit_ranges:
620 commit2 = commit
628 commit2 = commit
621 commit1 = commit.first_parent
629 commit1 = commit.first_parent
622
630
623 range_diff_cache_file_path = diff_cache_exist(
631 range_diff_cache_file_path = diff_cache_exist(
624 cache_path, 'diff', commit.raw_id,
632 cache_path, 'diff', commit.raw_id,
625 ign_whitespace_lcl, context_lcl, c.fulldiff)
633 ign_whitespace_lcl, context_lcl, c.fulldiff)
626
634
627 cached_diff = None
635 cached_diff = None
628 if caching_enabled:
636 if caching_enabled:
629 cached_diff = load_cached_diff(range_diff_cache_file_path)
637 cached_diff = load_cached_diff(range_diff_cache_file_path)
630
638
631 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
639 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
632 if not force_recache and has_proper_diff_cache:
640 if not force_recache and has_proper_diff_cache:
633 diffset = cached_diff['diff']
641 diffset = cached_diff['diff']
634 else:
642 else:
635 diffset = self._get_range_diffset(
643 diffset = self._get_range_diffset(
636 commits_source_repo, source_repo,
644 commits_source_repo, source_repo,
637 commit1, commit2, diff_limit, file_limit,
645 commit1, commit2, diff_limit, file_limit,
638 c.fulldiff, ign_whitespace_lcl, context_lcl
646 c.fulldiff, ign_whitespace_lcl, context_lcl
639 )
647 )
640
648
641 # save cached diff
649 # save cached diff
642 if caching_enabled:
650 if caching_enabled:
643 cache_diff(range_diff_cache_file_path, diffset, None)
651 cache_diff(range_diff_cache_file_path, diffset, None)
644
652
645 c.changes[commit.raw_id] = diffset
653 c.changes[commit.raw_id] = diffset
646
654
647 # this is a hack to properly display links, when creating PR, the
655 # this is a hack to properly display links, when creating PR, the
648 # compare view and others uses different notation, and
656 # compare view and others uses different notation, and
649 # compare_commits.mako renders links based on the target_repo.
657 # compare_commits.mako renders links based on the target_repo.
650 # We need to swap that here to generate it properly on the html side
658 # We need to swap that here to generate it properly on the html side
651 c.target_repo = c.source_repo
659 c.target_repo = c.source_repo
652
660
653 c.commit_statuses = ChangesetStatus.STATUSES
661 c.commit_statuses = ChangesetStatus.STATUSES
654
662
655 c.show_version_changes = not pr_closed
663 c.show_version_changes = not pr_closed
656 if c.show_version_changes:
664 if c.show_version_changes:
657 cur_obj = pull_request_at_ver
665 cur_obj = pull_request_at_ver
658 prev_obj = prev_pull_request_at_ver
666 prev_obj = prev_pull_request_at_ver
659
667
660 old_commit_ids = prev_obj.revisions
668 old_commit_ids = prev_obj.revisions
661 new_commit_ids = cur_obj.revisions
669 new_commit_ids = cur_obj.revisions
662 commit_changes = PullRequestModel()._calculate_commit_id_changes(
670 commit_changes = PullRequestModel()._calculate_commit_id_changes(
663 old_commit_ids, new_commit_ids)
671 old_commit_ids, new_commit_ids)
664 c.commit_changes_summary = commit_changes
672 c.commit_changes_summary = commit_changes
665
673
666 # calculate the diff for commits between versions
674 # calculate the diff for commits between versions
667 c.commit_changes = []
675 c.commit_changes = []
668 mark = lambda cs, fw: list(
676 mark = lambda cs, fw: list(
669 h.itertools.izip_longest([], cs, fillvalue=fw))
677 h.itertools.izip_longest([], cs, fillvalue=fw))
670 for c_type, raw_id in mark(commit_changes.added, 'a') \
678 for c_type, raw_id in mark(commit_changes.added, 'a') \
671 + mark(commit_changes.removed, 'r') \
679 + mark(commit_changes.removed, 'r') \
672 + mark(commit_changes.common, 'c'):
680 + mark(commit_changes.common, 'c'):
673
681
674 if raw_id in commit_cache:
682 if raw_id in commit_cache:
675 commit = commit_cache[raw_id]
683 commit = commit_cache[raw_id]
676 else:
684 else:
677 try:
685 try:
678 commit = commits_source_repo.get_commit(raw_id)
686 commit = commits_source_repo.get_commit(raw_id)
679 except CommitDoesNotExistError:
687 except CommitDoesNotExistError:
680 # in case we fail extracting still use "dummy" commit
688 # in case we fail extracting still use "dummy" commit
681 # for display in commit diff
689 # for display in commit diff
682 commit = h.AttributeDict(
690 commit = h.AttributeDict(
683 {'raw_id': raw_id,
691 {'raw_id': raw_id,
684 'message': 'EMPTY or MISSING COMMIT'})
692 'message': 'EMPTY or MISSING COMMIT'})
685 c.commit_changes.append([c_type, commit])
693 c.commit_changes.append([c_type, commit])
686
694
687 # current user review statuses for each version
695 # current user review statuses for each version
688 c.review_versions = {}
696 c.review_versions = {}
689 if self._rhodecode_user.user_id in allowed_reviewers:
697 if self._rhodecode_user.user_id in allowed_reviewers:
690 for co in general_comments:
698 for co in general_comments:
691 if co.author.user_id == self._rhodecode_user.user_id:
699 if co.author.user_id == self._rhodecode_user.user_id:
692 status = co.status_change
700 status = co.status_change
693 if status:
701 if status:
694 _ver_pr = status[0].comment.pull_request_version_id
702 _ver_pr = status[0].comment.pull_request_version_id
695 c.review_versions[_ver_pr] = status[0]
703 c.review_versions[_ver_pr] = status[0]
696
704
697 return self._get_template_context(c)
705 return self._get_template_context(c)
698
706
699 def get_commits(
707 def get_commits(
700 self, commits_source_repo, pull_request_at_ver, source_commit,
708 self, commits_source_repo, pull_request_at_ver, source_commit,
701 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
709 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
710 maybe_unreachable=False):
711
702 commit_cache = collections.OrderedDict()
712 commit_cache = collections.OrderedDict()
703 missing_requirements = False
713 missing_requirements = False
714
704 try:
715 try:
705 pre_load = ["author", "date", "message", "branch", "parents"]
716 pre_load = ["author", "date", "message", "branch", "parents"]
706 show_revs = pull_request_at_ver.revisions
717
707 for rev in show_revs:
718 pull_request_commits = pull_request_at_ver.revisions
708 comm = commits_source_repo.get_commit(
719 log.debug('Loading %s commits from %s',
709 commit_id=rev, pre_load=pre_load)
720 len(pull_request_commits), commits_source_repo)
721
722 for rev in pull_request_commits:
723 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
724 maybe_unreachable=maybe_unreachable)
710 commit_cache[comm.raw_id] = comm
725 commit_cache[comm.raw_id] = comm
711
726
712 # Order here matters, we first need to get target, and then
727 # Order here matters, we first need to get target, and then
713 # the source
728 # the source
714 target_commit = commits_source_repo.get_commit(
729 target_commit = commits_source_repo.get_commit(
715 commit_id=safe_str(target_ref_id))
730 commit_id=safe_str(target_ref_id))
716
731
717 source_commit = commits_source_repo.get_commit(
732 source_commit = commits_source_repo.get_commit(
718 commit_id=safe_str(source_ref_id))
733 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
719 except CommitDoesNotExistError:
734 except CommitDoesNotExistError:
720 log.warning(
735 log.warning('Failed to get commit from `{}` repo'.format(
721 'Failed to get commit from `{}` repo'.format(
736 commits_source_repo), exc_info=True)
722 commits_source_repo), exc_info=True)
723 except RepositoryRequirementError:
737 except RepositoryRequirementError:
724 log.warning(
738 log.warning('Failed to get all required data from repo', exc_info=True)
725 'Failed to get all required data from repo', exc_info=True)
726 missing_requirements = True
739 missing_requirements = True
727 ancestor_commit = None
740 ancestor_commit = None
728 try:
741 try:
729 ancestor_id = source_scm.get_common_ancestor(
742 ancestor_id = source_scm.get_common_ancestor(
730 source_commit.raw_id, target_commit.raw_id, target_scm)
743 source_commit.raw_id, target_commit.raw_id, target_scm)
731 ancestor_commit = source_scm.get_commit(ancestor_id)
744 ancestor_commit = source_scm.get_commit(ancestor_id)
732 except Exception:
745 except Exception:
733 ancestor_commit = None
746 ancestor_commit = None
734 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
747 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
735
748
736 def assure_not_empty_repo(self):
749 def assure_not_empty_repo(self):
737 _ = self.request.translate
750 _ = self.request.translate
738
751
739 try:
752 try:
740 self.db_repo.scm_instance().get_commit()
753 self.db_repo.scm_instance().get_commit()
741 except EmptyRepositoryError:
754 except EmptyRepositoryError:
742 h.flash(h.literal(_('There are no commits yet')),
755 h.flash(h.literal(_('There are no commits yet')),
743 category='warning')
756 category='warning')
744 raise HTTPFound(
757 raise HTTPFound(
745 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
758 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
746
759
747 @LoginRequired()
760 @LoginRequired()
748 @NotAnonymous()
761 @NotAnonymous()
749 @HasRepoPermissionAnyDecorator(
762 @HasRepoPermissionAnyDecorator(
750 'repository.read', 'repository.write', 'repository.admin')
763 'repository.read', 'repository.write', 'repository.admin')
751 @view_config(
764 @view_config(
752 route_name='pullrequest_new', request_method='GET',
765 route_name='pullrequest_new', request_method='GET',
753 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
766 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
754 def pull_request_new(self):
767 def pull_request_new(self):
755 _ = self.request.translate
768 _ = self.request.translate
756 c = self.load_default_context()
769 c = self.load_default_context()
757
770
758 self.assure_not_empty_repo()
771 self.assure_not_empty_repo()
759 source_repo = self.db_repo
772 source_repo = self.db_repo
760
773
761 commit_id = self.request.GET.get('commit')
774 commit_id = self.request.GET.get('commit')
762 branch_ref = self.request.GET.get('branch')
775 branch_ref = self.request.GET.get('branch')
763 bookmark_ref = self.request.GET.get('bookmark')
776 bookmark_ref = self.request.GET.get('bookmark')
764
777
765 try:
778 try:
766 source_repo_data = PullRequestModel().generate_repo_data(
779 source_repo_data = PullRequestModel().generate_repo_data(
767 source_repo, commit_id=commit_id,
780 source_repo, commit_id=commit_id,
768 branch=branch_ref, bookmark=bookmark_ref,
781 branch=branch_ref, bookmark=bookmark_ref,
769 translator=self.request.translate)
782 translator=self.request.translate)
770 except CommitDoesNotExistError as e:
783 except CommitDoesNotExistError as e:
771 log.exception(e)
784 log.exception(e)
772 h.flash(_('Commit does not exist'), 'error')
785 h.flash(_('Commit does not exist'), 'error')
773 raise HTTPFound(
786 raise HTTPFound(
774 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
787 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
775
788
776 default_target_repo = source_repo
789 default_target_repo = source_repo
777
790
778 if source_repo.parent and c.has_origin_repo_read_perm:
791 if source_repo.parent and c.has_origin_repo_read_perm:
779 parent_vcs_obj = source_repo.parent.scm_instance()
792 parent_vcs_obj = source_repo.parent.scm_instance()
780 if parent_vcs_obj and not parent_vcs_obj.is_empty():
793 if parent_vcs_obj and not parent_vcs_obj.is_empty():
781 # change default if we have a parent repo
794 # change default if we have a parent repo
782 default_target_repo = source_repo.parent
795 default_target_repo = source_repo.parent
783
796
784 target_repo_data = PullRequestModel().generate_repo_data(
797 target_repo_data = PullRequestModel().generate_repo_data(
785 default_target_repo, translator=self.request.translate)
798 default_target_repo, translator=self.request.translate)
786
799
787 selected_source_ref = source_repo_data['refs']['selected_ref']
800 selected_source_ref = source_repo_data['refs']['selected_ref']
788 title_source_ref = ''
801 title_source_ref = ''
789 if selected_source_ref:
802 if selected_source_ref:
790 title_source_ref = selected_source_ref.split(':', 2)[1]
803 title_source_ref = selected_source_ref.split(':', 2)[1]
791 c.default_title = PullRequestModel().generate_pullrequest_title(
804 c.default_title = PullRequestModel().generate_pullrequest_title(
792 source=source_repo.repo_name,
805 source=source_repo.repo_name,
793 source_ref=title_source_ref,
806 source_ref=title_source_ref,
794 target=default_target_repo.repo_name
807 target=default_target_repo.repo_name
795 )
808 )
796
809
797 c.default_repo_data = {
810 c.default_repo_data = {
798 'source_repo_name': source_repo.repo_name,
811 'source_repo_name': source_repo.repo_name,
799 'source_refs_json': json.dumps(source_repo_data),
812 'source_refs_json': json.dumps(source_repo_data),
800 'target_repo_name': default_target_repo.repo_name,
813 'target_repo_name': default_target_repo.repo_name,
801 'target_refs_json': json.dumps(target_repo_data),
814 'target_refs_json': json.dumps(target_repo_data),
802 }
815 }
803 c.default_source_ref = selected_source_ref
816 c.default_source_ref = selected_source_ref
804
817
805 return self._get_template_context(c)
818 return self._get_template_context(c)
806
819
807 @LoginRequired()
820 @LoginRequired()
808 @NotAnonymous()
821 @NotAnonymous()
809 @HasRepoPermissionAnyDecorator(
822 @HasRepoPermissionAnyDecorator(
810 'repository.read', 'repository.write', 'repository.admin')
823 'repository.read', 'repository.write', 'repository.admin')
811 @view_config(
824 @view_config(
812 route_name='pullrequest_repo_refs', request_method='GET',
825 route_name='pullrequest_repo_refs', request_method='GET',
813 renderer='json_ext', xhr=True)
826 renderer='json_ext', xhr=True)
814 def pull_request_repo_refs(self):
827 def pull_request_repo_refs(self):
815 self.load_default_context()
828 self.load_default_context()
816 target_repo_name = self.request.matchdict['target_repo_name']
829 target_repo_name = self.request.matchdict['target_repo_name']
817 repo = Repository.get_by_repo_name(target_repo_name)
830 repo = Repository.get_by_repo_name(target_repo_name)
818 if not repo:
831 if not repo:
819 raise HTTPNotFound()
832 raise HTTPNotFound()
820
833
821 target_perm = HasRepoPermissionAny(
834 target_perm = HasRepoPermissionAny(
822 'repository.read', 'repository.write', 'repository.admin')(
835 'repository.read', 'repository.write', 'repository.admin')(
823 target_repo_name)
836 target_repo_name)
824 if not target_perm:
837 if not target_perm:
825 raise HTTPNotFound()
838 raise HTTPNotFound()
826
839
827 return PullRequestModel().generate_repo_data(
840 return PullRequestModel().generate_repo_data(
828 repo, translator=self.request.translate)
841 repo, translator=self.request.translate)
829
842
830 @LoginRequired()
843 @LoginRequired()
831 @NotAnonymous()
844 @NotAnonymous()
832 @HasRepoPermissionAnyDecorator(
845 @HasRepoPermissionAnyDecorator(
833 'repository.read', 'repository.write', 'repository.admin')
846 'repository.read', 'repository.write', 'repository.admin')
834 @view_config(
847 @view_config(
835 route_name='pullrequest_repo_targets', request_method='GET',
848 route_name='pullrequest_repo_targets', request_method='GET',
836 renderer='json_ext', xhr=True)
849 renderer='json_ext', xhr=True)
    def pullrequest_repo_targets(self):
        """
        XHR endpoint returning a select2-style payload of candidate target
        repositories for a new pull request: this repository itself, its
        forks, and (when this repo is itself a fork) sibling repos forked
        from the same parent.

        Reads the optional ``query`` GET parameter to filter repository
        names (case-insensitive substring match). Returns a dict with
        ``more`` and ``results`` keys, rendered as JSON.
        """
        _ = self.request.translate
        filter_query = self.request.GET.get('query')

        # get the parents: repos forked from the same parent as this one,
        # shortest names first; empty repositories are excluded below
        parent_target_repos = []
        if self.db_repo.parent:
            parents_query = Repository.query() \
                .order_by(func.length(Repository.repo_name)) \
                .filter(Repository.fork_id == self.db_repo.parent.repo_id)

            if filter_query:
                ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
                parents_query = parents_query.filter(
                    Repository.repo_name.ilike(ilike_expression))
            parents = parents_query.limit(20).all()

            for parent in parents:
                parent_vcs_obj = parent.scm_instance()
                # skip repos whose VCS backend is unavailable or that
                # contain no commits yet
                if parent_vcs_obj and not parent_vcs_obj.is_empty():
                    parent_target_repos.append(parent)

        # get other forks, and repo itself; exclude anything already
        # collected through the parent lookup above
        query = Repository.query() \
            .order_by(func.length(Repository.repo_name)) \
            .filter(
                or_(Repository.repo_id == self.db_repo.repo_id,  # repo itself
                    Repository.fork_id == self.db_repo.repo_id)  # forks of this repo
            ) \
            .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))

        if filter_query:
            ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
            query = query.filter(Repository.repo_name.ilike(ilike_expression))

        # cap the combined result at roughly 20 entries overall
        limit = max(20 - len(parent_target_repos), 5)  # not less than 5
        target_repos = query.limit(limit).all()

        all_target_repos = target_repos + parent_target_repos

        repos = []
        # This checks permissions to the repositories
        for obj in ScmModel().get_repos(all_target_repos):
            repos.append({
                'id': obj['name'],
                'text': obj['name'],
                'type': 'repo',
                'repo_id': obj['dbrepo']['repo_id'],
                'repo_type': obj['dbrepo']['repo_type'],
                'private': obj['dbrepo']['private'],

            })

        data = {
            'more': False,
            'results': [{
                'text': _('Repositories'),
                'children': repos
            }] if repos else []
        }
        return data
899 @LoginRequired()
912 @LoginRequired()
900 @NotAnonymous()
913 @NotAnonymous()
901 @HasRepoPermissionAnyDecorator(
914 @HasRepoPermissionAnyDecorator(
902 'repository.read', 'repository.write', 'repository.admin')
915 'repository.read', 'repository.write', 'repository.admin')
903 @CSRFRequired()
916 @CSRFRequired()
904 @view_config(
917 @view_config(
905 route_name='pullrequest_create', request_method='POST',
918 route_name='pullrequest_create', request_method='POST',
906 renderer=None)
919 renderer=None)
907 def pull_request_create(self):
920 def pull_request_create(self):
908 _ = self.request.translate
921 _ = self.request.translate
909 self.assure_not_empty_repo()
922 self.assure_not_empty_repo()
910 self.load_default_context()
923 self.load_default_context()
911
924
912 controls = peppercorn.parse(self.request.POST.items())
925 controls = peppercorn.parse(self.request.POST.items())
913
926
914 try:
927 try:
915 form = PullRequestForm(
928 form = PullRequestForm(
916 self.request.translate, self.db_repo.repo_id)()
929 self.request.translate, self.db_repo.repo_id)()
917 _form = form.to_python(controls)
930 _form = form.to_python(controls)
918 except formencode.Invalid as errors:
931 except formencode.Invalid as errors:
919 if errors.error_dict.get('revisions'):
932 if errors.error_dict.get('revisions'):
920 msg = 'Revisions: %s' % errors.error_dict['revisions']
933 msg = 'Revisions: %s' % errors.error_dict['revisions']
921 elif errors.error_dict.get('pullrequest_title'):
934 elif errors.error_dict.get('pullrequest_title'):
922 msg = errors.error_dict.get('pullrequest_title')
935 msg = errors.error_dict.get('pullrequest_title')
923 else:
936 else:
924 msg = _('Error creating pull request: {}').format(errors)
937 msg = _('Error creating pull request: {}').format(errors)
925 log.exception(msg)
938 log.exception(msg)
926 h.flash(msg, 'error')
939 h.flash(msg, 'error')
927
940
928 # would rather just go back to form ...
941 # would rather just go back to form ...
929 raise HTTPFound(
942 raise HTTPFound(
930 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
943 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
931
944
932 source_repo = _form['source_repo']
945 source_repo = _form['source_repo']
933 source_ref = _form['source_ref']
946 source_ref = _form['source_ref']
934 target_repo = _form['target_repo']
947 target_repo = _form['target_repo']
935 target_ref = _form['target_ref']
948 target_ref = _form['target_ref']
936 commit_ids = _form['revisions'][::-1]
949 commit_ids = _form['revisions'][::-1]
937
950
938 # find the ancestor for this pr
951 # find the ancestor for this pr
939 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
952 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
940 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
953 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
941
954
942 if not (source_db_repo or target_db_repo):
955 if not (source_db_repo or target_db_repo):
943 h.flash(_('source_repo or target repo not found'), category='error')
956 h.flash(_('source_repo or target repo not found'), category='error')
944 raise HTTPFound(
957 raise HTTPFound(
945 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
958 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
946
959
947 # re-check permissions again here
960 # re-check permissions again here
948 # source_repo we must have read permissions
961 # source_repo we must have read permissions
949
962
950 source_perm = HasRepoPermissionAny(
963 source_perm = HasRepoPermissionAny(
951 'repository.read', 'repository.write', 'repository.admin')(
964 'repository.read', 'repository.write', 'repository.admin')(
952 source_db_repo.repo_name)
965 source_db_repo.repo_name)
953 if not source_perm:
966 if not source_perm:
954 msg = _('Not Enough permissions to source repo `{}`.'.format(
967 msg = _('Not Enough permissions to source repo `{}`.'.format(
955 source_db_repo.repo_name))
968 source_db_repo.repo_name))
956 h.flash(msg, category='error')
969 h.flash(msg, category='error')
957 # copy the args back to redirect
970 # copy the args back to redirect
958 org_query = self.request.GET.mixed()
971 org_query = self.request.GET.mixed()
959 raise HTTPFound(
972 raise HTTPFound(
960 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
973 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
961 _query=org_query))
974 _query=org_query))
962
975
963 # target repo we must have read permissions, and also later on
976 # target repo we must have read permissions, and also later on
964 # we want to check branch permissions here
977 # we want to check branch permissions here
965 target_perm = HasRepoPermissionAny(
978 target_perm = HasRepoPermissionAny(
966 'repository.read', 'repository.write', 'repository.admin')(
979 'repository.read', 'repository.write', 'repository.admin')(
967 target_db_repo.repo_name)
980 target_db_repo.repo_name)
968 if not target_perm:
981 if not target_perm:
969 msg = _('Not Enough permissions to target repo `{}`.'.format(
982 msg = _('Not Enough permissions to target repo `{}`.'.format(
970 target_db_repo.repo_name))
983 target_db_repo.repo_name))
971 h.flash(msg, category='error')
984 h.flash(msg, category='error')
972 # copy the args back to redirect
985 # copy the args back to redirect
973 org_query = self.request.GET.mixed()
986 org_query = self.request.GET.mixed()
974 raise HTTPFound(
987 raise HTTPFound(
975 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
988 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
976 _query=org_query))
989 _query=org_query))
977
990
978 source_scm = source_db_repo.scm_instance()
991 source_scm = source_db_repo.scm_instance()
979 target_scm = target_db_repo.scm_instance()
992 target_scm = target_db_repo.scm_instance()
980
993
981 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
994 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
982 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
995 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
983
996
984 ancestor = source_scm.get_common_ancestor(
997 ancestor = source_scm.get_common_ancestor(
985 source_commit.raw_id, target_commit.raw_id, target_scm)
998 source_commit.raw_id, target_commit.raw_id, target_scm)
986
999
987 # recalculate target ref based on ancestor
1000 # recalculate target ref based on ancestor
988 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
1001 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
989 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1002 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
990
1003
991 get_default_reviewers_data, validate_default_reviewers = \
1004 get_default_reviewers_data, validate_default_reviewers = \
992 PullRequestModel().get_reviewer_functions()
1005 PullRequestModel().get_reviewer_functions()
993
1006
994 # recalculate reviewers logic, to make sure we can validate this
1007 # recalculate reviewers logic, to make sure we can validate this
995 reviewer_rules = get_default_reviewers_data(
1008 reviewer_rules = get_default_reviewers_data(
996 self._rhodecode_db_user, source_db_repo,
1009 self._rhodecode_db_user, source_db_repo,
997 source_commit, target_db_repo, target_commit)
1010 source_commit, target_db_repo, target_commit)
998
1011
999 given_reviewers = _form['review_members']
1012 given_reviewers = _form['review_members']
1000 reviewers = validate_default_reviewers(
1013 reviewers = validate_default_reviewers(
1001 given_reviewers, reviewer_rules)
1014 given_reviewers, reviewer_rules)
1002
1015
1003 pullrequest_title = _form['pullrequest_title']
1016 pullrequest_title = _form['pullrequest_title']
1004 title_source_ref = source_ref.split(':', 2)[1]
1017 title_source_ref = source_ref.split(':', 2)[1]
1005 if not pullrequest_title:
1018 if not pullrequest_title:
1006 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1019 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1007 source=source_repo,
1020 source=source_repo,
1008 source_ref=title_source_ref,
1021 source_ref=title_source_ref,
1009 target=target_repo
1022 target=target_repo
1010 )
1023 )
1011
1024
1012 description = _form['pullrequest_desc']
1025 description = _form['pullrequest_desc']
1013 description_renderer = _form['description_renderer']
1026 description_renderer = _form['description_renderer']
1014
1027
1015 try:
1028 try:
1016 pull_request = PullRequestModel().create(
1029 pull_request = PullRequestModel().create(
1017 created_by=self._rhodecode_user.user_id,
1030 created_by=self._rhodecode_user.user_id,
1018 source_repo=source_repo,
1031 source_repo=source_repo,
1019 source_ref=source_ref,
1032 source_ref=source_ref,
1020 target_repo=target_repo,
1033 target_repo=target_repo,
1021 target_ref=target_ref,
1034 target_ref=target_ref,
1022 revisions=commit_ids,
1035 revisions=commit_ids,
1023 reviewers=reviewers,
1036 reviewers=reviewers,
1024 title=pullrequest_title,
1037 title=pullrequest_title,
1025 description=description,
1038 description=description,
1026 description_renderer=description_renderer,
1039 description_renderer=description_renderer,
1027 reviewer_data=reviewer_rules,
1040 reviewer_data=reviewer_rules,
1028 auth_user=self._rhodecode_user
1041 auth_user=self._rhodecode_user
1029 )
1042 )
1030 Session().commit()
1043 Session().commit()
1031
1044
1032 h.flash(_('Successfully opened new pull request'),
1045 h.flash(_('Successfully opened new pull request'),
1033 category='success')
1046 category='success')
1034 except Exception:
1047 except Exception:
1035 msg = _('Error occurred during creation of this pull request.')
1048 msg = _('Error occurred during creation of this pull request.')
1036 log.exception(msg)
1049 log.exception(msg)
1037 h.flash(msg, category='error')
1050 h.flash(msg, category='error')
1038
1051
1039 # copy the args back to redirect
1052 # copy the args back to redirect
1040 org_query = self.request.GET.mixed()
1053 org_query = self.request.GET.mixed()
1041 raise HTTPFound(
1054 raise HTTPFound(
1042 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1055 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1043 _query=org_query))
1056 _query=org_query))
1044
1057
1045 raise HTTPFound(
1058 raise HTTPFound(
1046 h.route_path('pullrequest_show', repo_name=target_repo,
1059 h.route_path('pullrequest_show', repo_name=target_repo,
1047 pull_request_id=pull_request.pull_request_id))
1060 pull_request_id=pull_request.pull_request_id))
1048
1061
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_update', request_method='POST',
        renderer='json_ext')
    def pull_request_update(self):
        """
        Dispatch endpoint for pull request modifications. Depending on the
        POST payload this updates the reviewer list, refreshes the PR
        commits, or edits title/description. Returns a JSON dict with
        ``response`` and ``redirect_url`` keys.

        Raises HTTPBadRequest when the payload contains no recognized
        action, HTTPForbidden when the user may not update this PR.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        self.load_default_context()
        redirect_url = None

        # closed pull requests are immutable
        if pull_request.is_closed():
            log.debug('update: forbidden because pull request is closed')
            msg = _(u'Cannot update closed pull requests.')
            h.flash(msg, category='error')
            return {'response': True,
                    'redirect_url': redirect_url}

        is_state_changing = pull_request.is_state_changing()

        # only owner or admin can update it
        allowed_to_update = PullRequestModel().check_user_update(
            pull_request, self._rhodecode_user)
        if allowed_to_update:
            controls = peppercorn.parse(self.request.POST.items())
            force_refresh = str2bool(self.request.POST.get('force_refresh'))

            if 'review_members' in controls:
                self._update_reviewers(
                    pull_request, controls['review_members'],
                    pull_request.reviewer_data)
            elif str2bool(self.request.POST.get('update_commits', 'false')):
                # refuse a commits refresh while another state transition
                # is already in flight
                if is_state_changing:
                    log.debug('commits update: forbidden because pull request is in state %s',
                              pull_request.pull_request_state)
                    msg = _(u'Cannot update pull requests commits in state other than `{}`. '
                            u'Current state is: `{}`').format(
                        PullRequest.STATE_CREATED, pull_request.pull_request_state)
                    h.flash(msg, category='error')
                    return {'response': True,
                            'redirect_url': redirect_url}

                self._update_commits(pull_request)
                if force_refresh:
                    # instruct the client to fully reload the PR page
                    redirect_url = h.route_path(
                        'pullrequest_show', repo_name=self.db_repo_name,
                        pull_request_id=pull_request.pull_request_id,
                        _query={"force_refresh": 1})
            elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
                self._edit_pull_request(pull_request)
            else:
                raise HTTPBadRequest()

            return {'response': True,
                    'redirect_url': redirect_url}
        raise HTTPForbidden()
1110
1123
1111 def _edit_pull_request(self, pull_request):
1124 def _edit_pull_request(self, pull_request):
1112 _ = self.request.translate
1125 _ = self.request.translate
1113
1126
1114 try:
1127 try:
1115 PullRequestModel().edit(
1128 PullRequestModel().edit(
1116 pull_request,
1129 pull_request,
1117 self.request.POST.get('title'),
1130 self.request.POST.get('title'),
1118 self.request.POST.get('description'),
1131 self.request.POST.get('description'),
1119 self.request.POST.get('description_renderer'),
1132 self.request.POST.get('description_renderer'),
1120 self._rhodecode_user)
1133 self._rhodecode_user)
1121 except ValueError:
1134 except ValueError:
1122 msg = _(u'Cannot update closed pull requests.')
1135 msg = _(u'Cannot update closed pull requests.')
1123 h.flash(msg, category='error')
1136 h.flash(msg, category='error')
1124 return
1137 return
1125 else:
1138 else:
1126 Session().commit()
1139 Session().commit()
1127
1140
1128 msg = _(u'Pull request title & description updated.')
1141 msg = _(u'Pull request title & description updated.')
1129 h.flash(msg, category='success')
1142 h.flash(msg, category='success')
1130 return
1143 return
1131
1144
    def _update_commits(self, pull_request):
        """
        Refresh the pull request's commits from its source ref while
        holding the UPDATING state, then flash and broadcast the outcome.
        """
        _ = self.request.translate

        # hold the UPDATING state for the duration of the refresh so that
        # concurrent operations observe the transition
        with pull_request.set_state(PullRequest.STATE_UPDATING):
            resp = PullRequestModel().update_commits(
                pull_request, self._rhodecode_db_user)

        if resp.executed:

            # describe where the changes originated, for the flash message
            if resp.target_changed and resp.source_changed:
                changed = 'target and source repositories'
            elif resp.target_changed and not resp.source_changed:
                changed = 'target repository'
            elif not resp.target_changed and resp.source_changed:
                changed = 'source repository'
            else:
                changed = 'nothing'

            msg = _(u'Pull request updated to "{source_commit_id}" with '
                    u'{count_added} added, {count_removed} removed commits. '
                    u'Source of changes: {change_source}')
            msg = msg.format(
                source_commit_id=pull_request.source_ref_parts.commit_id,
                count_added=len(resp.changes.added),
                count_removed=len(resp.changes.removed),
                change_source=changed)
            h.flash(msg, category='success')

            # live-notify anyone watching this PR page via channelstream,
            # with an inline "Reload page" link
            channel = '/repo${}$/pr/{}'.format(
                pull_request.target_repo.repo_name, pull_request.pull_request_id)
            message = msg + (
                ' - <a onclick="window.location.reload()">'
                '<strong>{}</strong></a>'.format(_('Reload page')))
            channelstream.post_message(
                channel, message, self._rhodecode_user.username,
                registry=self.request.registry)
        else:
            msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
            # some failure reasons are expected no-ops, not real errors
            warning_reasons = [
                UpdateFailureReason.NO_CHANGE,
                UpdateFailureReason.WRONG_REF_TYPE,
            ]
            category = 'warning' if resp.reason in warning_reasons else 'error'
            h.flash(msg, category=category)
1176
1189
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_merge', request_method='POST',
        renderer='json_ext')
    def pull_request_merge(self):
        """
        Merge will perform a server-side merge of the specified
        pull request, if the pull request is approved and mergeable.
        After successful merging, the pull request is automatically
        closed, with a relevant comment.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        # refuse to start a merge while another state transition is running
        if pull_request.is_state_changing():
            log.debug('show: forbidden because pull request is in state %s',
                      pull_request.pull_request_state)
            msg = _(u'Cannot merge pull requests in state other than `{}`. '
                    u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
                                                      pull_request.pull_request_state)
            h.flash(msg, category='error')
            raise HTTPFound(
                h.route_path('pullrequest_show',
                             repo_name=pull_request.target_repo.repo_name,
                             pull_request_id=pull_request.pull_request_id))

        self.load_default_context()

        # run the merge pre-condition checks under the UPDATING state guard
        # NOTE(review): set_state(UPDATING) is entered twice in this view
        # (once around validation, once around the merge itself) — confirm
        # this matches the intended state-machine usage
        with pull_request.set_state(PullRequest.STATE_UPDATING):
            check = MergeCheck.validate(
                pull_request, auth_user=self._rhodecode_user,
                translator=self.request.translate)
        merge_possible = not check.failed

        for err_type, error_msg in check.errors:
            h.flash(error_msg, category=err_type)

        if merge_possible:
            log.debug("Pre-conditions checked, trying to merge.")
            extras = vcs_operation_context(
                self.request.environ, repo_name=pull_request.target_repo.repo_name,
                username=self._rhodecode_db_user.username, action='push',
                scm=pull_request.target_repo.repo_type)
            with pull_request.set_state(PullRequest.STATE_UPDATING):
                self._merge_pull_request(
                    pull_request, self._rhodecode_db_user, extras)
        else:
            log.debug("Pre-conditions failed, NOT merging.")

        raise HTTPFound(
            h.route_path('pullrequest_show',
                         repo_name=pull_request.target_repo.repo_name,
                         pull_request_id=pull_request.pull_request_id))
1235
1248
1236 def _merge_pull_request(self, pull_request, user, extras):
1249 def _merge_pull_request(self, pull_request, user, extras):
1237 _ = self.request.translate
1250 _ = self.request.translate
1238 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1251 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1239
1252
1240 if merge_resp.executed:
1253 if merge_resp.executed:
1241 log.debug("The merge was successful, closing the pull request.")
1254 log.debug("The merge was successful, closing the pull request.")
1242 PullRequestModel().close_pull_request(
1255 PullRequestModel().close_pull_request(
1243 pull_request.pull_request_id, user)
1256 pull_request.pull_request_id, user)
1244 Session().commit()
1257 Session().commit()
1245 msg = _('Pull request was successfully merged and closed.')
1258 msg = _('Pull request was successfully merged and closed.')
1246 h.flash(msg, category='success')
1259 h.flash(msg, category='success')
1247 else:
1260 else:
1248 log.debug(
1261 log.debug(
1249 "The merge was not successful. Merge response: %s", merge_resp)
1262 "The merge was not successful. Merge response: %s", merge_resp)
1250 msg = merge_resp.merge_status_message
1263 msg = merge_resp.merge_status_message
1251 h.flash(msg, category='error')
1264 h.flash(msg, category='error')
1252
1265
1253 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1266 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1254 _ = self.request.translate
1267 _ = self.request.translate
1255
1268
1256 get_default_reviewers_data, validate_default_reviewers = \
1269 get_default_reviewers_data, validate_default_reviewers = \
1257 PullRequestModel().get_reviewer_functions()
1270 PullRequestModel().get_reviewer_functions()
1258
1271
1259 try:
1272 try:
1260 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1273 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1261 except ValueError as e:
1274 except ValueError as e:
1262 log.error('Reviewers Validation: {}'.format(e))
1275 log.error('Reviewers Validation: {}'.format(e))
1263 h.flash(e, category='error')
1276 h.flash(e, category='error')
1264 return
1277 return
1265
1278
1266 old_calculated_status = pull_request.calculated_review_status()
1279 old_calculated_status = pull_request.calculated_review_status()
1267 PullRequestModel().update_reviewers(
1280 PullRequestModel().update_reviewers(
1268 pull_request, reviewers, self._rhodecode_user)
1281 pull_request, reviewers, self._rhodecode_user)
1269 h.flash(_('Pull request reviewers updated.'), category='success')
1282 h.flash(_('Pull request reviewers updated.'), category='success')
1270 Session().commit()
1283 Session().commit()
1271
1284
1272 # trigger status changed if change in reviewers changes the status
1285 # trigger status changed if change in reviewers changes the status
1273 calculated_status = pull_request.calculated_review_status()
1286 calculated_status = pull_request.calculated_review_status()
1274 if old_calculated_status != calculated_status:
1287 if old_calculated_status != calculated_status:
1275 PullRequestModel().trigger_pull_request_hook(
1288 PullRequestModel().trigger_pull_request_hook(
1276 pull_request, self._rhodecode_user, 'review_status_change',
1289 pull_request, self._rhodecode_user, 'review_status_change',
1277 data={'status': calculated_status})
1290 data={'status': calculated_status})
1278
1291
1279 @LoginRequired()
1292 @LoginRequired()
1280 @NotAnonymous()
1293 @NotAnonymous()
1281 @HasRepoPermissionAnyDecorator(
1294 @HasRepoPermissionAnyDecorator(
1282 'repository.read', 'repository.write', 'repository.admin')
1295 'repository.read', 'repository.write', 'repository.admin')
1283 @CSRFRequired()
1296 @CSRFRequired()
1284 @view_config(
1297 @view_config(
1285 route_name='pullrequest_delete', request_method='POST',
1298 route_name='pullrequest_delete', request_method='POST',
1286 renderer='json_ext')
1299 renderer='json_ext')
1287 def pull_request_delete(self):
1300 def pull_request_delete(self):
1288 _ = self.request.translate
1301 _ = self.request.translate
1289
1302
1290 pull_request = PullRequest.get_or_404(
1303 pull_request = PullRequest.get_or_404(
1291 self.request.matchdict['pull_request_id'])
1304 self.request.matchdict['pull_request_id'])
1292 self.load_default_context()
1305 self.load_default_context()
1293
1306
1294 pr_closed = pull_request.is_closed()
1307 pr_closed = pull_request.is_closed()
1295 allowed_to_delete = PullRequestModel().check_user_delete(
1308 allowed_to_delete = PullRequestModel().check_user_delete(
1296 pull_request, self._rhodecode_user) and not pr_closed
1309 pull_request, self._rhodecode_user) and not pr_closed
1297
1310
1298 # only owner can delete it !
1311 # only owner can delete it !
1299 if allowed_to_delete:
1312 if allowed_to_delete:
1300 PullRequestModel().delete(pull_request, self._rhodecode_user)
1313 PullRequestModel().delete(pull_request, self._rhodecode_user)
1301 Session().commit()
1314 Session().commit()
1302 h.flash(_('Successfully deleted pull request'),
1315 h.flash(_('Successfully deleted pull request'),
1303 category='success')
1316 category='success')
1304 raise HTTPFound(h.route_path('pullrequest_show_all',
1317 raise HTTPFound(h.route_path('pullrequest_show_all',
1305 repo_name=self.db_repo_name))
1318 repo_name=self.db_repo_name))
1306
1319
1307 log.warning('user %s tried to delete pull request without access',
1320 log.warning('user %s tried to delete pull request without access',
1308 self._rhodecode_user)
1321 self._rhodecode_user)
1309 raise HTTPNotFound()
1322 raise HTTPNotFound()
1310
1323
1311 @LoginRequired()
1324 @LoginRequired()
1312 @NotAnonymous()
1325 @NotAnonymous()
1313 @HasRepoPermissionAnyDecorator(
1326 @HasRepoPermissionAnyDecorator(
1314 'repository.read', 'repository.write', 'repository.admin')
1327 'repository.read', 'repository.write', 'repository.admin')
1315 @CSRFRequired()
1328 @CSRFRequired()
1316 @view_config(
1329 @view_config(
1317 route_name='pullrequest_comment_create', request_method='POST',
1330 route_name='pullrequest_comment_create', request_method='POST',
1318 renderer='json_ext')
1331 renderer='json_ext')
1319 def pull_request_comment_create(self):
1332 def pull_request_comment_create(self):
1320 _ = self.request.translate
1333 _ = self.request.translate
1321
1334
1322 pull_request = PullRequest.get_or_404(
1335 pull_request = PullRequest.get_or_404(
1323 self.request.matchdict['pull_request_id'])
1336 self.request.matchdict['pull_request_id'])
1324 pull_request_id = pull_request.pull_request_id
1337 pull_request_id = pull_request.pull_request_id
1325
1338
1326 if pull_request.is_closed():
1339 if pull_request.is_closed():
1327 log.debug('comment: forbidden because pull request is closed')
1340 log.debug('comment: forbidden because pull request is closed')
1328 raise HTTPForbidden()
1341 raise HTTPForbidden()
1329
1342
1330 allowed_to_comment = PullRequestModel().check_user_comment(
1343 allowed_to_comment = PullRequestModel().check_user_comment(
1331 pull_request, self._rhodecode_user)
1344 pull_request, self._rhodecode_user)
1332 if not allowed_to_comment:
1345 if not allowed_to_comment:
1333 log.debug(
1346 log.debug(
1334 'comment: forbidden because pull request is from forbidden repo')
1347 'comment: forbidden because pull request is from forbidden repo')
1335 raise HTTPForbidden()
1348 raise HTTPForbidden()
1336
1349
1337 c = self.load_default_context()
1350 c = self.load_default_context()
1338
1351
1339 status = self.request.POST.get('changeset_status', None)
1352 status = self.request.POST.get('changeset_status', None)
1340 text = self.request.POST.get('text')
1353 text = self.request.POST.get('text')
1341 comment_type = self.request.POST.get('comment_type')
1354 comment_type = self.request.POST.get('comment_type')
1342 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1355 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1343 close_pull_request = self.request.POST.get('close_pull_request')
1356 close_pull_request = self.request.POST.get('close_pull_request')
1344
1357
1345 # the logic here should work like following, if we submit close
1358 # the logic here should work like following, if we submit close
1346 # pr comment, use `close_pull_request_with_comment` function
1359 # pr comment, use `close_pull_request_with_comment` function
1347 # else handle regular comment logic
1360 # else handle regular comment logic
1348
1361
1349 if close_pull_request:
1362 if close_pull_request:
1350 # only owner or admin or person with write permissions
1363 # only owner or admin or person with write permissions
1351 allowed_to_close = PullRequestModel().check_user_update(
1364 allowed_to_close = PullRequestModel().check_user_update(
1352 pull_request, self._rhodecode_user)
1365 pull_request, self._rhodecode_user)
1353 if not allowed_to_close:
1366 if not allowed_to_close:
1354 log.debug('comment: forbidden because not allowed to close '
1367 log.debug('comment: forbidden because not allowed to close '
1355 'pull request %s', pull_request_id)
1368 'pull request %s', pull_request_id)
1356 raise HTTPForbidden()
1369 raise HTTPForbidden()
1357
1370
1358 # This also triggers `review_status_change`
1371 # This also triggers `review_status_change`
1359 comment, status = PullRequestModel().close_pull_request_with_comment(
1372 comment, status = PullRequestModel().close_pull_request_with_comment(
1360 pull_request, self._rhodecode_user, self.db_repo, message=text,
1373 pull_request, self._rhodecode_user, self.db_repo, message=text,
1361 auth_user=self._rhodecode_user)
1374 auth_user=self._rhodecode_user)
1362 Session().flush()
1375 Session().flush()
1363
1376
1364 PullRequestModel().trigger_pull_request_hook(
1377 PullRequestModel().trigger_pull_request_hook(
1365 pull_request, self._rhodecode_user, 'comment',
1378 pull_request, self._rhodecode_user, 'comment',
1366 data={'comment': comment})
1379 data={'comment': comment})
1367
1380
1368 else:
1381 else:
1369 # regular comment case, could be inline, or one with status.
1382 # regular comment case, could be inline, or one with status.
1370 # for that one we check also permissions
1383 # for that one we check also permissions
1371
1384
1372 allowed_to_change_status = PullRequestModel().check_user_change_status(
1385 allowed_to_change_status = PullRequestModel().check_user_change_status(
1373 pull_request, self._rhodecode_user)
1386 pull_request, self._rhodecode_user)
1374
1387
1375 if status and allowed_to_change_status:
1388 if status and allowed_to_change_status:
1376 message = (_('Status change %(transition_icon)s %(status)s')
1389 message = (_('Status change %(transition_icon)s %(status)s')
1377 % {'transition_icon': '>',
1390 % {'transition_icon': '>',
1378 'status': ChangesetStatus.get_status_lbl(status)})
1391 'status': ChangesetStatus.get_status_lbl(status)})
1379 text = text or message
1392 text = text or message
1380
1393
1381 comment = CommentsModel().create(
1394 comment = CommentsModel().create(
1382 text=text,
1395 text=text,
1383 repo=self.db_repo.repo_id,
1396 repo=self.db_repo.repo_id,
1384 user=self._rhodecode_user.user_id,
1397 user=self._rhodecode_user.user_id,
1385 pull_request=pull_request,
1398 pull_request=pull_request,
1386 f_path=self.request.POST.get('f_path'),
1399 f_path=self.request.POST.get('f_path'),
1387 line_no=self.request.POST.get('line'),
1400 line_no=self.request.POST.get('line'),
1388 status_change=(ChangesetStatus.get_status_lbl(status)
1401 status_change=(ChangesetStatus.get_status_lbl(status)
1389 if status and allowed_to_change_status else None),
1402 if status and allowed_to_change_status else None),
1390 status_change_type=(status
1403 status_change_type=(status
1391 if status and allowed_to_change_status else None),
1404 if status and allowed_to_change_status else None),
1392 comment_type=comment_type,
1405 comment_type=comment_type,
1393 resolves_comment_id=resolves_comment_id,
1406 resolves_comment_id=resolves_comment_id,
1394 auth_user=self._rhodecode_user
1407 auth_user=self._rhodecode_user
1395 )
1408 )
1396
1409
1397 if allowed_to_change_status:
1410 if allowed_to_change_status:
1398 # calculate old status before we change it
1411 # calculate old status before we change it
1399 old_calculated_status = pull_request.calculated_review_status()
1412 old_calculated_status = pull_request.calculated_review_status()
1400
1413
1401 # get status if set !
1414 # get status if set !
1402 if status:
1415 if status:
1403 ChangesetStatusModel().set_status(
1416 ChangesetStatusModel().set_status(
1404 self.db_repo.repo_id,
1417 self.db_repo.repo_id,
1405 status,
1418 status,
1406 self._rhodecode_user.user_id,
1419 self._rhodecode_user.user_id,
1407 comment,
1420 comment,
1408 pull_request=pull_request
1421 pull_request=pull_request
1409 )
1422 )
1410
1423
1411 Session().flush()
1424 Session().flush()
1412 # this is somehow required to get access to some relationship
1425 # this is somehow required to get access to some relationship
1413 # loaded on comment
1426 # loaded on comment
1414 Session().refresh(comment)
1427 Session().refresh(comment)
1415
1428
1416 PullRequestModel().trigger_pull_request_hook(
1429 PullRequestModel().trigger_pull_request_hook(
1417 pull_request, self._rhodecode_user, 'comment',
1430 pull_request, self._rhodecode_user, 'comment',
1418 data={'comment': comment})
1431 data={'comment': comment})
1419
1432
1420 # we now calculate the status of pull request, and based on that
1433 # we now calculate the status of pull request, and based on that
1421 # calculation we set the commits status
1434 # calculation we set the commits status
1422 calculated_status = pull_request.calculated_review_status()
1435 calculated_status = pull_request.calculated_review_status()
1423 if old_calculated_status != calculated_status:
1436 if old_calculated_status != calculated_status:
1424 PullRequestModel().trigger_pull_request_hook(
1437 PullRequestModel().trigger_pull_request_hook(
1425 pull_request, self._rhodecode_user, 'review_status_change',
1438 pull_request, self._rhodecode_user, 'review_status_change',
1426 data={'status': calculated_status})
1439 data={'status': calculated_status})
1427
1440
1428 Session().commit()
1441 Session().commit()
1429
1442
1430 data = {
1443 data = {
1431 'target_id': h.safeid(h.safe_unicode(
1444 'target_id': h.safeid(h.safe_unicode(
1432 self.request.POST.get('f_path'))),
1445 self.request.POST.get('f_path'))),
1433 }
1446 }
1434 if comment:
1447 if comment:
1435 c.co = comment
1448 c.co = comment
1436 rendered_comment = render(
1449 rendered_comment = render(
1437 'rhodecode:templates/changeset/changeset_comment_block.mako',
1450 'rhodecode:templates/changeset/changeset_comment_block.mako',
1438 self._get_template_context(c), self.request)
1451 self._get_template_context(c), self.request)
1439
1452
1440 data.update(comment.get_dict())
1453 data.update(comment.get_dict())
1441 data.update({'rendered_text': rendered_comment})
1454 data.update({'rendered_text': rendered_comment})
1442
1455
1443 return data
1456 return data
1444
1457
1445 @LoginRequired()
1458 @LoginRequired()
1446 @NotAnonymous()
1459 @NotAnonymous()
1447 @HasRepoPermissionAnyDecorator(
1460 @HasRepoPermissionAnyDecorator(
1448 'repository.read', 'repository.write', 'repository.admin')
1461 'repository.read', 'repository.write', 'repository.admin')
1449 @CSRFRequired()
1462 @CSRFRequired()
1450 @view_config(
1463 @view_config(
1451 route_name='pullrequest_comment_delete', request_method='POST',
1464 route_name='pullrequest_comment_delete', request_method='POST',
1452 renderer='json_ext')
1465 renderer='json_ext')
1453 def pull_request_comment_delete(self):
1466 def pull_request_comment_delete(self):
1454 pull_request = PullRequest.get_or_404(
1467 pull_request = PullRequest.get_or_404(
1455 self.request.matchdict['pull_request_id'])
1468 self.request.matchdict['pull_request_id'])
1456
1469
1457 comment = ChangesetComment.get_or_404(
1470 comment = ChangesetComment.get_or_404(
1458 self.request.matchdict['comment_id'])
1471 self.request.matchdict['comment_id'])
1459 comment_id = comment.comment_id
1472 comment_id = comment.comment_id
1460
1473
1461 if pull_request.is_closed():
1474 if pull_request.is_closed():
1462 log.debug('comment: forbidden because pull request is closed')
1475 log.debug('comment: forbidden because pull request is closed')
1463 raise HTTPForbidden()
1476 raise HTTPForbidden()
1464
1477
1465 if not comment:
1478 if not comment:
1466 log.debug('Comment with id:%s not found, skipping', comment_id)
1479 log.debug('Comment with id:%s not found, skipping', comment_id)
1467 # comment already deleted in another call probably
1480 # comment already deleted in another call probably
1468 return True
1481 return True
1469
1482
1470 if comment.pull_request.is_closed():
1483 if comment.pull_request.is_closed():
1471 # don't allow deleting comments on closed pull request
1484 # don't allow deleting comments on closed pull request
1472 raise HTTPForbidden()
1485 raise HTTPForbidden()
1473
1486
1474 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1487 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1475 super_admin = h.HasPermissionAny('hg.admin')()
1488 super_admin = h.HasPermissionAny('hg.admin')()
1476 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1489 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1477 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1490 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1478 comment_repo_admin = is_repo_admin and is_repo_comment
1491 comment_repo_admin = is_repo_admin and is_repo_comment
1479
1492
1480 if super_admin or comment_owner or comment_repo_admin:
1493 if super_admin or comment_owner or comment_repo_admin:
1481 old_calculated_status = comment.pull_request.calculated_review_status()
1494 old_calculated_status = comment.pull_request.calculated_review_status()
1482 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1495 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1483 Session().commit()
1496 Session().commit()
1484 calculated_status = comment.pull_request.calculated_review_status()
1497 calculated_status = comment.pull_request.calculated_review_status()
1485 if old_calculated_status != calculated_status:
1498 if old_calculated_status != calculated_status:
1486 PullRequestModel().trigger_pull_request_hook(
1499 PullRequestModel().trigger_pull_request_hook(
1487 comment.pull_request, self._rhodecode_user, 'review_status_change',
1500 comment.pull_request, self._rhodecode_user, 'review_status_change',
1488 data={'status': calculated_status})
1501 data={'status': calculated_status})
1489 return True
1502 return True
1490 else:
1503 else:
1491 log.warning('No permissions for user %s to delete comment_id: %s',
1504 log.warning('No permissions for user %s to delete comment_id: %s',
1492 self._rhodecode_db_user, comment_id)
1505 self._rhodecode_db_user, comment_id)
1493 raise HTTPNotFound()
1506 raise HTTPNotFound()
@@ -1,1100 +1,1104 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2019 RhodeCode GmbH
3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Some simple helper functions
23 Some simple helper functions
24 """
24 """
25
25
26 import collections
26 import collections
27 import datetime
27 import datetime
28 import dateutil.relativedelta
28 import dateutil.relativedelta
29 import hashlib
29 import hashlib
30 import logging
30 import logging
31 import re
31 import re
32 import sys
32 import sys
33 import time
33 import time
34 import urllib
34 import urllib
35 import urlobject
35 import urlobject
36 import uuid
36 import uuid
37 import getpass
37 import getpass
38 from functools import update_wrapper, partial
38 from functools import update_wrapper, partial
39
39
40 import pygments.lexers
40 import pygments.lexers
41 import sqlalchemy
41 import sqlalchemy
42 import sqlalchemy.engine.url
42 import sqlalchemy.engine.url
43 import sqlalchemy.exc
43 import sqlalchemy.exc
44 import sqlalchemy.sql
44 import sqlalchemy.sql
45 import webob
45 import webob
46 import pyramid.threadlocal
46 import pyramid.threadlocal
47 from pyramid import compat
47 from pyramid import compat
48 from pyramid.settings import asbool
48 from pyramid.settings import asbool
49
49
50 import rhodecode
50 import rhodecode
51 from rhodecode.translation import _, _pluralize
51 from rhodecode.translation import _, _pluralize
52
52
53
53
54 def md5(s):
54 def md5(s):
55 return hashlib.md5(s).hexdigest()
55 return hashlib.md5(s).hexdigest()
56
56
57
57
58 def md5_safe(s):
58 def md5_safe(s):
59 return md5(safe_str(s))
59 return md5(safe_str(s))
60
60
61
61
62 def sha1(s):
62 def sha1(s):
63 return hashlib.sha1(s).hexdigest()
63 return hashlib.sha1(s).hexdigest()
64
64
65
65
66 def sha1_safe(s):
66 def sha1_safe(s):
67 return sha1(safe_str(s))
67 return sha1(safe_str(s))
68
68
69
69
70 def __get_lem(extra_mapping=None):
70 def __get_lem(extra_mapping=None):
71 """
71 """
72 Get language extension map based on what's inside pygments lexers
72 Get language extension map based on what's inside pygments lexers
73 """
73 """
74 d = collections.defaultdict(lambda: [])
74 d = collections.defaultdict(lambda: [])
75
75
76 def __clean(s):
76 def __clean(s):
77 s = s.lstrip('*')
77 s = s.lstrip('*')
78 s = s.lstrip('.')
78 s = s.lstrip('.')
79
79
80 if s.find('[') != -1:
80 if s.find('[') != -1:
81 exts = []
81 exts = []
82 start, stop = s.find('['), s.find(']')
82 start, stop = s.find('['), s.find(']')
83
83
84 for suffix in s[start + 1:stop]:
84 for suffix in s[start + 1:stop]:
85 exts.append(s[:s.find('[')] + suffix)
85 exts.append(s[:s.find('[')] + suffix)
86 return [e.lower() for e in exts]
86 return [e.lower() for e in exts]
87 else:
87 else:
88 return [s.lower()]
88 return [s.lower()]
89
89
90 for lx, t in sorted(pygments.lexers.LEXERS.items()):
90 for lx, t in sorted(pygments.lexers.LEXERS.items()):
91 m = map(__clean, t[-2])
91 m = map(__clean, t[-2])
92 if m:
92 if m:
93 m = reduce(lambda x, y: x + y, m)
93 m = reduce(lambda x, y: x + y, m)
94 for ext in m:
94 for ext in m:
95 desc = lx.replace('Lexer', '')
95 desc = lx.replace('Lexer', '')
96 d[ext].append(desc)
96 d[ext].append(desc)
97
97
98 data = dict(d)
98 data = dict(d)
99
99
100 extra_mapping = extra_mapping or {}
100 extra_mapping = extra_mapping or {}
101 if extra_mapping:
101 if extra_mapping:
102 for k, v in extra_mapping.items():
102 for k, v in extra_mapping.items():
103 if k not in data:
103 if k not in data:
104 # register new mapping2lexer
104 # register new mapping2lexer
105 data[k] = [v]
105 data[k] = [v]
106
106
107 return data
107 return data
108
108
109
109
110 def str2bool(_str):
110 def str2bool(_str):
111 """
111 """
112 returns True/False value from given string, it tries to translate the
112 returns True/False value from given string, it tries to translate the
113 string into boolean
113 string into boolean
114
114
115 :param _str: string value to translate into boolean
115 :param _str: string value to translate into boolean
116 :rtype: boolean
116 :rtype: boolean
117 :returns: boolean from given string
117 :returns: boolean from given string
118 """
118 """
119 if _str is None:
119 if _str is None:
120 return False
120 return False
121 if _str in (True, False):
121 if _str in (True, False):
122 return _str
122 return _str
123 _str = str(_str).strip().lower()
123 _str = str(_str).strip().lower()
124 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
124 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
125
125
126
126
127 def aslist(obj, sep=None, strip=True):
127 def aslist(obj, sep=None, strip=True):
128 """
128 """
129 Returns given string separated by sep as list
129 Returns given string separated by sep as list
130
130
131 :param obj:
131 :param obj:
132 :param sep:
132 :param sep:
133 :param strip:
133 :param strip:
134 """
134 """
135 if isinstance(obj, (basestring,)):
135 if isinstance(obj, (basestring,)):
136 lst = obj.split(sep)
136 lst = obj.split(sep)
137 if strip:
137 if strip:
138 lst = [v.strip() for v in lst]
138 lst = [v.strip() for v in lst]
139 return lst
139 return lst
140 elif isinstance(obj, (list, tuple)):
140 elif isinstance(obj, (list, tuple)):
141 return obj
141 return obj
142 elif obj is None:
142 elif obj is None:
143 return []
143 return []
144 else:
144 else:
145 return [obj]
145 return [obj]
146
146
147
147
148 def convert_line_endings(line, mode):
148 def convert_line_endings(line, mode):
149 """
149 """
150 Converts a given line "line end" accordingly to given mode
150 Converts a given line "line end" accordingly to given mode
151
151
152 Available modes are::
152 Available modes are::
153 0 - Unix
153 0 - Unix
154 1 - Mac
154 1 - Mac
155 2 - DOS
155 2 - DOS
156
156
157 :param line: given line to convert
157 :param line: given line to convert
158 :param mode: mode to convert to
158 :param mode: mode to convert to
159 :rtype: str
159 :rtype: str
160 :return: converted line according to mode
160 :return: converted line according to mode
161 """
161 """
162 if mode == 0:
162 if mode == 0:
163 line = line.replace('\r\n', '\n')
163 line = line.replace('\r\n', '\n')
164 line = line.replace('\r', '\n')
164 line = line.replace('\r', '\n')
165 elif mode == 1:
165 elif mode == 1:
166 line = line.replace('\r\n', '\r')
166 line = line.replace('\r\n', '\r')
167 line = line.replace('\n', '\r')
167 line = line.replace('\n', '\r')
168 elif mode == 2:
168 elif mode == 2:
169 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
169 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
170 return line
170 return line
171
171
172
172
173 def detect_mode(line, default):
173 def detect_mode(line, default):
174 """
174 """
175 Detects line break for given line, if line break couldn't be found
175 Detects line break for given line, if line break couldn't be found
176 given default value is returned
176 given default value is returned
177
177
178 :param line: str line
178 :param line: str line
179 :param default: default
179 :param default: default
180 :rtype: int
180 :rtype: int
181 :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
181 :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
182 """
182 """
183 if line.endswith('\r\n'):
183 if line.endswith('\r\n'):
184 return 2
184 return 2
185 elif line.endswith('\n'):
185 elif line.endswith('\n'):
186 return 0
186 return 0
187 elif line.endswith('\r'):
187 elif line.endswith('\r'):
188 return 1
188 return 1
189 else:
189 else:
190 return default
190 return default
191
191
192
192
193 def safe_int(val, default=None):
193 def safe_int(val, default=None):
194 """
194 """
195 Returns int() of val if val is not convertable to int use default
195 Returns int() of val if val is not convertable to int use default
196 instead
196 instead
197
197
198 :param val:
198 :param val:
199 :param default:
199 :param default:
200 """
200 """
201
201
202 try:
202 try:
203 val = int(val)
203 val = int(val)
204 except (ValueError, TypeError):
204 except (ValueError, TypeError):
205 val = default
205 val = default
206
206
207 return val
207 return val
208
208
209
209
210 def safe_unicode(str_, from_encoding=None, use_chardet=False):
210 def safe_unicode(str_, from_encoding=None, use_chardet=False):
211 """
211 """
212 safe unicode function. Does few trick to turn str_ into unicode
212 safe unicode function. Does few trick to turn str_ into unicode
213
213
214 In case of UnicodeDecode error, we try to return it with encoding detected
214 In case of UnicodeDecode error, we try to return it with encoding detected
215 by chardet library if it fails fallback to unicode with errors replaced
215 by chardet library if it fails fallback to unicode with errors replaced
216
216
217 :param str_: string to decode
217 :param str_: string to decode
218 :rtype: unicode
218 :rtype: unicode
219 :returns: unicode object
219 :returns: unicode object
220 """
220 """
221 if isinstance(str_, unicode):
221 if isinstance(str_, unicode):
222 return str_
222 return str_
223
223
224 if not from_encoding:
224 if not from_encoding:
225 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
225 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
226 'utf8'), sep=',')
226 'utf8'), sep=',')
227 from_encoding = DEFAULT_ENCODINGS
227 from_encoding = DEFAULT_ENCODINGS
228
228
229 if not isinstance(from_encoding, (list, tuple)):
229 if not isinstance(from_encoding, (list, tuple)):
230 from_encoding = [from_encoding]
230 from_encoding = [from_encoding]
231
231
232 try:
232 try:
233 return unicode(str_)
233 return unicode(str_)
234 except UnicodeDecodeError:
234 except UnicodeDecodeError:
235 pass
235 pass
236
236
237 for enc in from_encoding:
237 for enc in from_encoding:
238 try:
238 try:
239 return unicode(str_, enc)
239 return unicode(str_, enc)
240 except UnicodeDecodeError:
240 except UnicodeDecodeError:
241 pass
241 pass
242
242
243 if use_chardet:
243 if use_chardet:
244 try:
244 try:
245 import chardet
245 import chardet
246 encoding = chardet.detect(str_)['encoding']
246 encoding = chardet.detect(str_)['encoding']
247 if encoding is None:
247 if encoding is None:
248 raise Exception()
248 raise Exception()
249 return str_.decode(encoding)
249 return str_.decode(encoding)
250 except (ImportError, UnicodeDecodeError, Exception):
250 except (ImportError, UnicodeDecodeError, Exception):
251 return unicode(str_, from_encoding[0], 'replace')
251 return unicode(str_, from_encoding[0], 'replace')
252 else:
252 else:
253 return unicode(str_, from_encoding[0], 'replace')
253 return unicode(str_, from_encoding[0], 'replace')
254
254
255 def safe_str(unicode_, to_encoding=None, use_chardet=False):
255 def safe_str(unicode_, to_encoding=None, use_chardet=False):
256 """
256 """
257 safe str function. Does few trick to turn unicode_ into string
257 safe str function. Does few trick to turn unicode_ into string
258
258
259 In case of UnicodeEncodeError, we try to return it with encoding detected
259 In case of UnicodeEncodeError, we try to return it with encoding detected
260 by chardet library if it fails fallback to string with errors replaced
260 by chardet library if it fails fallback to string with errors replaced
261
261
262 :param unicode_: unicode to encode
262 :param unicode_: unicode to encode
263 :rtype: str
263 :rtype: str
264 :returns: str object
264 :returns: str object
265 """
265 """
266
266
267 # if it's not basestr cast to str
267 # if it's not basestr cast to str
268 if not isinstance(unicode_, compat.string_types):
268 if not isinstance(unicode_, compat.string_types):
269 return str(unicode_)
269 return str(unicode_)
270
270
271 if isinstance(unicode_, str):
271 if isinstance(unicode_, str):
272 return unicode_
272 return unicode_
273
273
274 if not to_encoding:
274 if not to_encoding:
275 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
275 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
276 'utf8'), sep=',')
276 'utf8'), sep=',')
277 to_encoding = DEFAULT_ENCODINGS
277 to_encoding = DEFAULT_ENCODINGS
278
278
279 if not isinstance(to_encoding, (list, tuple)):
279 if not isinstance(to_encoding, (list, tuple)):
280 to_encoding = [to_encoding]
280 to_encoding = [to_encoding]
281
281
282 for enc in to_encoding:
282 for enc in to_encoding:
283 try:
283 try:
284 return unicode_.encode(enc)
284 return unicode_.encode(enc)
285 except UnicodeEncodeError:
285 except UnicodeEncodeError:
286 pass
286 pass
287
287
288 if use_chardet:
288 if use_chardet:
289 try:
289 try:
290 import chardet
290 import chardet
291 encoding = chardet.detect(unicode_)['encoding']
291 encoding = chardet.detect(unicode_)['encoding']
292 if encoding is None:
292 if encoding is None:
293 raise UnicodeEncodeError()
293 raise UnicodeEncodeError()
294
294
295 return unicode_.encode(encoding)
295 return unicode_.encode(encoding)
296 except (ImportError, UnicodeEncodeError):
296 except (ImportError, UnicodeEncodeError):
297 return unicode_.encode(to_encoding[0], 'replace')
297 return unicode_.encode(to_encoding[0], 'replace')
298 else:
298 else:
299 return unicode_.encode(to_encoding[0], 'replace')
299 return unicode_.encode(to_encoding[0], 'replace')
300
300
301
301
302 def remove_suffix(s, suffix):
302 def remove_suffix(s, suffix):
303 if s.endswith(suffix):
303 if s.endswith(suffix):
304 s = s[:-1 * len(suffix)]
304 s = s[:-1 * len(suffix)]
305 return s
305 return s
306
306
307
307
308 def remove_prefix(s, prefix):
308 def remove_prefix(s, prefix):
309 if s.startswith(prefix):
309 if s.startswith(prefix):
310 s = s[len(prefix):]
310 s = s[len(prefix):]
311 return s
311 return s
312
312
313
313
314 def find_calling_context(ignore_modules=None):
314 def find_calling_context(ignore_modules=None):
315 """
315 """
316 Look through the calling stack and return the frame which called
316 Look through the calling stack and return the frame which called
317 this function and is part of core module ( ie. rhodecode.* )
317 this function and is part of core module ( ie. rhodecode.* )
318
318
319 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
319 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
320 """
320 """
321
321
322 ignore_modules = ignore_modules or []
322 ignore_modules = ignore_modules or []
323
323
324 f = sys._getframe(2)
324 f = sys._getframe(2)
325 while f.f_back is not None:
325 while f.f_back is not None:
326 name = f.f_globals.get('__name__')
326 name = f.f_globals.get('__name__')
327 if name and name.startswith(__name__.split('.')[0]):
327 if name and name.startswith(__name__.split('.')[0]):
328 if name not in ignore_modules:
328 if name not in ignore_modules:
329 return f
329 return f
330 f = f.f_back
330 f = f.f_back
331 return None
331 return None
332
332
333
333
def ping_connection(connection, branch):
    """
    SQLAlchemy ``engine_connect`` listener that verifies a pooled database
    connection is still alive by issuing a ``SELECT 1`` before handing it
    out (pessimistic disconnect handling).

    :param connection: the SQLAlchemy ``Connection`` being checked out
    :param branch: True when this is a sub-connection of an already
        checked-out connection; those are skipped
    """
    if branch:
        # "branch" refers to a sub-connection of a connection,
        # we don't want to bother pinging on these.
        return

    # turn off "close with result". This flag is only used with
    # "connectionless" execution, otherwise will be False in any case
    save_should_close_with_result = connection.should_close_with_result
    connection.should_close_with_result = False

    try:
        # run a SELECT 1. use a core select() so that
        # the SELECT of a scalar value without a table is
        # appropriately formatted for the backend
        connection.scalar(sqlalchemy.sql.select([1]))
    except sqlalchemy.exc.DBAPIError as err:
        # catch SQLAlchemy's DBAPIError, which is a wrapper
        # for the DBAPI's exception. It includes a .connection_invalidated
        # attribute which specifies if this connection is a "disconnect"
        # condition, which is based on inspection of the original exception
        # by the dialect in use.
        if err.connection_invalidated:
            # run the same SELECT again - the connection will re-validate
            # itself and establish a new connection. The disconnect detection
            # here also causes the whole connection pool to be invalidated
            # so that all stale connections are discarded.
            connection.scalar(sqlalchemy.sql.select([1]))
        else:
            raise
    finally:
        # restore "close with result"
        connection.should_close_with_result = save_should_close_with_result
367
367
368
368
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """Custom engine_from_config functions.

    Builds an SQLAlchemy engine from *configuration* and optionally wires
    up two extras controlled by RhodeCode-specific flags (both are popped
    here so they are not forwarded to SQLAlchemy itself):

    * ``sqlalchemy.db1.ping_connection`` - attach the ``ping_connection``
      listener to validate pooled connections on checkout
    * ``sqlalchemy.db1.debug_query`` - attach cursor-execute listeners that
      log each query start together with the calling context
    """
    log = logging.getLogger('sqlalchemy.engine')
    # pop() removes our custom flags so sqlalchemy.engine_from_config
    # only sees options it understands
    use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
    debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))

    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        # wrap text in ANSI escape codes so queries stand out in the log
        color_seq = '\033[1;33m'  # This is yellow: code 33
        normal = '\x1b[0m'
        return ''.join([color_seq, sql, normal])

    if use_ping_connection:
        log.debug('Adding ping_connection on the engine config.')
        sqlalchemy.event.listen(engine, "engine_connect", ping_connection)

    if debug:
        # attach events only for debug configuration
        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            # remember when the query started; cleared in after_cursor_execute
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            calling_context = find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ])
            if calling_context:
                log.info(color_sql('call context %s:%s' % (
                    calling_context.f_code.co_filename,
                    calling_context.f_lineno,
                )))

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
        sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)

    return engine
410
410
411
411
def get_encryption_key(config):
    """Return the secret used to encrypt values.

    Prefers the dedicated ``rhodecode.encrypted_values.secret`` setting and
    falls back to the (mandatory) beaker session secret when the dedicated
    one is unset or empty.
    """
    fallback_secret = config['beaker.session.secret']
    dedicated_secret = config.get('rhodecode.encrypted_values.secret')
    if dedicated_secret:
        return dedicated_secret
    return fallback_secret
416
416
417
417
def age(prevdate, now=None, show_short_version=False, show_suffix=True,
        short_format=False):
    """
    Turns a datetime into an age string.
    If show_short_version is True, this generates a shorter string with
    an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.

    * IMPORTANT*
    Code of this function is written in special way so it's easier to
    backport it to javascript. If you mean to update it, please also update
    `jquery.timeago-extension.js` file

    :param prevdate: datetime object
    :param now: get current time, if not define we use
        `datetime.datetime.now()`
    :param show_short_version: if it should approximate the date and
        return a shorter string
    :param show_suffix: append "ago" / prepend "in" to the result
    :param short_format: show short format, eg 2D instead of 2 days
    :rtype: unicode
    :returns: unicode words describing age
    """

    def _get_relative_delta(now, prevdate):
        # raw relativedelta split into the units this function renders
        base = dateutil.relativedelta.relativedelta(now, prevdate)
        return {
            'year': base.years,
            'month': base.months,
            'day': base.days,
            'hour': base.hours,
            'minute': base.minutes,
            'second': base.seconds,
        }

    def _is_leap_year(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def get_month(prevdate):
        return prevdate.month

    def get_year(prevdate):
        return prevdate.year

    now = now or datetime.datetime.now()
    # units in decreasing significance; index order matters for the
    # carry-fix loops below
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
    deltas = {}
    future = False

    if prevdate > now:
        # date lies in the future: swap operands so the delta is positive
        # and remember to render "in ..." instead of "... ago"
        now_old = now
        now = prevdate
        prevdate = now_old
        future = True
    if future:
        prevdate = prevdate.replace(microsecond=0)
    # Get date parts deltas
    for part in order:
        rel_delta = _get_relative_delta(now, prevdate)
        deltas[part] = rel_delta[part]

    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
    # not 1 hour, -59 minutes and -59 seconds)
    offsets = [[5, 60], [4, 60], [3, 24]]
    for element in offsets:  # seconds, minutes, hours
        num = element[0]
        length = element[1]

        part = order[num]
        carry_part = order[num - 1]

        if deltas[part] < 0:
            deltas[part] += length
            deltas[carry_part] -= 1

    # Same thing for days except that the increment depends on the (variable)
    # number of days in the month
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    if deltas['day'] < 0:
        if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
            deltas['day'] += 29
        else:
            deltas['day'] += month_lengths[get_month(prevdate) - 1]

        deltas['month'] -= 1

    if deltas['month'] < 0:
        deltas['month'] += 12
        deltas['year'] -= 1

    # Format the result
    if short_format:
        fmt_funcs = {
            'year': lambda d: u'%dy' % d,
            'month': lambda d: u'%dm' % d,
            'day': lambda d: u'%dd' % d,
            'hour': lambda d: u'%dh' % d,
            'minute': lambda d: u'%dmin' % d,
            'second': lambda d: u'%dsec' % d,
        }
    else:
        fmt_funcs = {
            'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
            'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
            'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
            'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
            'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
            'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
        }

    # walk from the most significant non-zero unit; render it plus (unless
    # short version is requested) the next smaller unit as detail
    i = 0
    for part in order:
        value = deltas[part]
        if value != 0:

            if i < 5:
                sub_part = order[i + 1]
                sub_value = deltas[sub_part]
            else:
                sub_value = 0

            if sub_value == 0 or show_short_version:
                _val = fmt_funcs[part](value)
                if future:
                    if show_suffix:
                        return _(u'in ${ago}', mapping={'ago': _val})
                    else:
                        return _(_val)

                else:
                    if show_suffix:
                        return _(u'${ago} ago', mapping={'ago': _val})
                    else:
                        return _(_val)

            val = fmt_funcs[part](value)
            val_detail = fmt_funcs[sub_part](sub_value)
            mapping = {'val': val, 'detail': val_detail}

            if short_format:
                datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
            else:
                datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)

            return datetime_tmpl
        i += 1
    return _(u'just now')
572
572
573
573
def age_from_seconds(seconds):
    """Return a short human-readable age for a seconds offset relative to
    now, e.g. ``5min`` (short version, no "ago"/"in" suffix)."""
    offset = safe_int(seconds) or 0
    reference = time_to_datetime(time.time() + offset)
    return age(reference, show_short_version=True, show_suffix=False)
578
578
579
579
def cleaned_uri(uri):
    """
    Quotes '[' and ']' from uri if there is only one of them.
    according to RFC3986 we cannot use such chars in uri

    NOTE(review): urllib.quote percent-encodes *every* char outside the
    ``safe`` set ('@$:/' plus alphanumerics and '_.-'), not only brackets —
    the docstring understates the behavior; confirm callers rely on this.

    :param uri:
    :return: uri without this chars
    """
    return urllib.quote(uri, safe='@$:/')
588
588
589
589
def uri_filter(uri):
    """
    Removes user:password from given url string

    :param uri:
    :rtype: unicode
    :returns: filtered list of strings
    """
    if not uri:
        return ''

    scheme = ''
    # peel off a known protocol prefix, remembering it for the result
    for candidate in ('https://', 'http://'):
        if uri.startswith(candidate):
            scheme = candidate
            uri = uri[len(candidate):]
            break

    # drop everything up to and including the credentials separator;
    # with no '@' present, find() returns -1 and the slice is a no-op
    uri = uri[uri.find('@') + 1:]

    # split host from port at the first colon
    sep = uri.find(':')
    if sep == -1:
        host = uri
        port = None
    else:
        host = uri[:sep]
        port = uri[sep + 1:]

    return filter(None, [scheme, host, port])
620
620
621
621
def credentials_filter(uri):
    """
    Returns a url with removed credentials

    :param uri:
    """

    parts = uri_filter(uri)
    # re-attach the ':' separator in front of the port, when one exists
    if len(parts) > 2 and parts[2]:
        parts[2] = ':' + parts[2]

    return ''.join(parts)
635
635
636
636
def get_host_info(request):
    """
    Generate host info, to obtain full url e.g https://server.com
    use this
    `{scheme}://{netloc}`

    Returns an empty dict when no request is available.
    """
    if not request:
        return {}

    qualified_home_url = request.route_url('home')
    parsed_url = urlobject.URLObject(qualified_home_url)
    # include any url prefix (proxy-prefix deployments) in the netloc
    decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))

    return {
        'scheme': parsed_url.scheme,
        'netloc': parsed_url.netloc+decoded_path,
        'hostname': parsed_url.hostname,
    }
655
655
656
656
def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
    """
    Render a clone url template (e.g. ``{scheme}://{user}@{netloc}/{repo}``)
    for a repository.

    :param request: pyramid request, used to resolve the 'home' route
    :param uri_tmpl: template with ``{placeholder}`` markers to substitute
    :param repo_name: substituted for ``{repo}``
    :param repo_id: substituted for ``{repoid}`` (stringified)
    :param repo_type: vcs type; 'svn' rewrites ``ssh://`` to ``svn+ssh://``
    :param override: extra or overriding template variables
    """
    qualified_home_url = request.route_url('home')
    parsed_url = urlobject.URLObject(qualified_home_url)
    decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))

    args = {
        'scheme': parsed_url.scheme,
        'user': '',
        'sys_user': getpass.getuser(),
        # path if we use proxy-prefix
        'netloc': parsed_url.netloc+decoded_path,
        'hostname': parsed_url.hostname,
        'prefix': decoded_path,
        'repo': repo_name,
        'repoid': str(repo_id),
        'repo_type': repo_type
    }
    args.update(override)
    # user may have been overridden; quote it for safe inclusion in a url
    args['user'] = urllib.quote(safe_str(args['user']))

    for k, v in args.items():
        uri_tmpl = uri_tmpl.replace('{%s}' % k, v)

    # special case for SVN clone url
    if repo_type == 'svn':
        uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')

    # remove leading @ sign if it's present. Case of empty user
    url_obj = urlobject.URLObject(uri_tmpl)
    url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))

    return safe_unicode(url)
689
689
690
690
def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
                    maybe_unreachable=False):
    """
    Safe version of get_commit if this commit doesn't exists for a
    repository it returns a Dummy one instead

    :param repo: repository instance
    :param commit_id: commit id as str
    :param commit_idx: numeric commit index
    :param pre_load: optional list of commit attributes to load
    :param maybe_unreachable: translate unreachable commits on git repos
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError
    if not isinstance(repo, BaseRepository):
        raise Exception('You must pass an Repository '
                        'object as first argument got %s', type(repo))

    try:
        commit = repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable)
    except (RepositoryError, LookupError):
        # missing or unreachable commit: fall back to a dummy EmptyCommit
        commit = EmptyCommit()
    return commit
713
717
714
718
def datetime_to_time(dt):
    """Convert a ``datetime`` to a unix timestamp (local time, float).

    Returns ``None`` for falsy input.
    """
    if not dt:
        return None
    return time.mktime(dt.timetuple())
718
722
719
723
def time_to_datetime(tm):
    """Convert a unix timestamp (number, or numeric string) to a local
    ``datetime``; returns ``None`` for falsy or unparsable input."""
    if not tm:
        return None
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return None
    return datetime.datetime.fromtimestamp(tm)
728
732
729
733
def time_to_utcdatetime(tm):
    """Convert a unix timestamp (number, or numeric string) to a naive UTC
    ``datetime``; returns ``None`` for falsy or unparsable input."""
    if not tm:
        return None
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return None
    return datetime.datetime.utcfromtimestamp(tm)
738
742
739
743
# Matches "@username" mentions: the '@' must be at line start or preceded by
# a character that cannot itself be part of a username; group(1) captures the
# mentioned name (alphanumeric start, then alphanumerics, '-', '_' and '.').
MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with letter, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)
746
750
747
751
def extract_mentioned_users(s):
    """
    Returns unique usernames from given string s that have @mention

    :param s: string to get mentions
    """
    unique_names = set(MENTIONS_REGEX.findall(s))
    return sorted(unique_names, key=lambda name: name.lower())
759
763
760
764
class AttributeDictBase(dict):
    """
    Dict subclass exposing keys as attributes; picklable, since the
    pickle state is simply the attribute dictionary.
    """
    def __getstate__(self):
        odict = self.__dict__  # get attribute dictionary
        return odict

    def __setstate__(self, dict):
        # NOTE: parameter name shadows the builtin ``dict``; kept as-is for
        # compatibility with existing pickled data
        self.__dict__ = dict

    # attribute assignment/deletion delegates to the dict item protocol
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
771
775
772
776
class StrictAttributeDict(AttributeDictBase):
    """
    Strict Version of Attribute dict which raises an Attribute error when
    requested attribute is not set
    """
    def __getattr__(self, attr):
        try:
            return self[attr]
        except KeyError:
            # translate to AttributeError so getattr()/hasattr() behave as
            # they would on a normal object
            raise AttributeError('%s object has no attribute %s' % (
                self.__class__, attr))
784
788
785
789
class AttributeDict(AttributeDictBase):
    """Attribute dict that returns ``None`` for missing attributes."""
    def __getattr__(self, attr):
        return self.get(attr, None)
789
793
790
794
791
795
class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
    """Ordered dict with ``defaultdict`` missing-key semantics.

    Iteration order is insertion order; missing keys are produced by
    ``default_factory`` (when set).
    """
    def __init__(self, default_factory=None, *args, **kwargs):
        # in python3 you can omit the args to super
        super(OrderedDefaultDict, self).__init__(*args, **kwargs)
        self.default_factory = default_factory
797
801
798
802
def fix_PATH(os_=None):
    """
    Get current active python path, and append it to PATH variable to fix
    issues of subprocess calls and different python versions

    :param os_: optional stand-in for the ``os`` module (used in tests)
    """
    if os_ is None:
        import os
    else:
        os = os_

    python_bin_dir = os.path.split(sys.executable)[0]
    current_path = os.environ['PATH']
    # prepend the interpreter's directory unless it is already first
    if not current_path.startswith(python_bin_dir):
        os.environ['PATH'] = '%s:%s' % (python_bin_dir, current_path)
812
816
813
817
def obfuscate_url_pw(engine):
    """Return the engine url as unicode with any password masked as XXXXX.

    Best-effort: if the input cannot be parsed as a SQLAlchemy URL it is
    returned unchanged (stringified).
    """
    _url = engine or ''
    try:
        _url = sqlalchemy.engine.url.make_url(engine)
        if _url.password:
            _url.password = 'XXXXX'
    except Exception:
        # unparsable input: fall through and return it as-is
        pass
    return unicode(_url)
823
827
824
828
def get_server_url(environ):
    """Return the server base url (host url + script name) for a WSGI
    environ dict."""
    req = webob.Request(environ)
    return req.host_url + req.script_name
828
832
829
833
def unique_id(hexlen=32):
    """Return a random identifier of at most *hexlen* chars, drawn from an
    alphabet without visually ambiguous characters (no 0/O, 1/I/l)."""
    safe_chars = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
    return suuid(alphabet=safe_chars, truncate_to=hexlen)
833
837
834
838
def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL safe UUID.

    If the url parameter is provided, set the namespace to the provided
    URL and generate a UUID.

    :param url to get the uuid for
    :truncate_to: truncate the basic 22 UUID to shorter version

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    chars = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
    base = len(chars)

    # If no URL is given, generate a random UUID; otherwise derive a
    # deterministic uuid3 from the URL namespace.
    if url is None:
        remainder = uuid.uuid4().int
    else:
        remainder = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    # Encode the 128-bit integer in base ``len(chars)``. Digits are emitted
    # least-significant first, matching the historic output ordering.
    digits = []
    while remainder > 0:
        remainder, digit = divmod(remainder, base)
        digits.append(chars[digit])
    return "".join(digits)[:truncate_to]
864
868
865
869
def get_current_rhodecode_user(request=None):
    """
    Gets rhodecode user from request

    Falls back to the pyramid threadlocal request when none is given;
    returns ``None`` when no user can be determined.
    """
    pyramid_request = request or pyramid.threadlocal.get_current_request()

    # web case
    if pyramid_request and hasattr(pyramid_request, 'user'):
        return pyramid_request.user

    # api case
    if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
        return pyramid_request.rpc_user

    return None
881
885
882
886
def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview, tries to find
    an acting user for the context of the call otherwise reports unknown user

    :param action: logging message eg 'comment 5 deleted'
    :param type: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :param type: string

    """

    logger_name = 'rhodecode.actions'

    if namespace:
        logger_name += '.' + namespace

    log = logging.getLogger(logger_name)

    # get a user if we can
    user = get_current_rhodecode_user()

    logfunc = log.info

    if not user:
        # no acting user resolvable: log at warning level instead
        user = '<unknown user>'
        logfunc = log.warning

    logfunc('Logging action by {}: {}'.format(user, action))
913
917
914
918
915 def escape_split(text, sep=',', maxsplit=-1):
919 def escape_split(text, sep=',', maxsplit=-1):
916 r"""
920 r"""
917 Allows for escaping of the separator: e.g. arg='foo\, bar'
921 Allows for escaping of the separator: e.g. arg='foo\, bar'
918
922
919 It should be noted that the way bash et. al. do command line parsing, those
923 It should be noted that the way bash et. al. do command line parsing, those
920 single quotes are required.
924 single quotes are required.
921 """
925 """
922 escaped_sep = r'\%s' % sep
926 escaped_sep = r'\%s' % sep
923
927
924 if escaped_sep not in text:
928 if escaped_sep not in text:
925 return text.split(sep, maxsplit)
929 return text.split(sep, maxsplit)
926
930
927 before, _mid, after = text.partition(escaped_sep)
931 before, _mid, after = text.partition(escaped_sep)
928 startlist = before.split(sep, maxsplit) # a regular split is fine here
932 startlist = before.split(sep, maxsplit) # a regular split is fine here
929 unfinished = startlist[-1]
933 unfinished = startlist[-1]
930 startlist = startlist[:-1]
934 startlist = startlist[:-1]
931
935
932 # recurse because there may be more escaped separators
936 # recurse because there may be more escaped separators
933 endlist = escape_split(after, sep, maxsplit)
937 endlist = escape_split(after, sep, maxsplit)
934
938
935 # finish building the escaped value. we use endlist[0] becaue the first
939 # finish building the escaped value. we use endlist[0] becaue the first
936 # part of the string sent in recursion is the rest of the escaped value.
940 # part of the string sent in recursion is the rest of the escaped value.
937 unfinished += sep + endlist[0]
941 unfinished += sep + endlist[0]
938
942
939 return startlist + [unfinished] + endlist[1:] # put together all the parts
943 return startlist + [unfinished] + endlist[1:] # put together all the parts
940
944
941
945
942 class OptionalAttr(object):
946 class OptionalAttr(object):
943 """
947 """
944 Special Optional Option that defines other attribute. Example::
948 Special Optional Option that defines other attribute. Example::
945
949
946 def test(apiuser, userid=Optional(OAttr('apiuser')):
950 def test(apiuser, userid=Optional(OAttr('apiuser')):
947 user = Optional.extract(userid)
951 user = Optional.extract(userid)
948 # calls
952 # calls
949
953
950 """
954 """
951
955
952 def __init__(self, attr_name):
956 def __init__(self, attr_name):
953 self.attr_name = attr_name
957 self.attr_name = attr_name
954
958
955 def __repr__(self):
959 def __repr__(self):
956 return '<OptionalAttr:%s>' % self.attr_name
960 return '<OptionalAttr:%s>' % self.attr_name
957
961
958 def __call__(self):
962 def __call__(self):
959 return self
963 return self
960
964
961
965
962 # alias
966 # alias
963 OAttr = OptionalAttr
967 OAttr = OptionalAttr
964
968
965
969
966 class Optional(object):
970 class Optional(object):
967 """
971 """
968 Defines an optional parameter::
972 Defines an optional parameter::
969
973
970 param = param.getval() if isinstance(param, Optional) else param
974 param = param.getval() if isinstance(param, Optional) else param
971 param = param() if isinstance(param, Optional) else param
975 param = param() if isinstance(param, Optional) else param
972
976
973 is equivalent of::
977 is equivalent of::
974
978
975 param = Optional.extract(param)
979 param = Optional.extract(param)
976
980
977 """
981 """
978
982
979 def __init__(self, type_):
983 def __init__(self, type_):
980 self.type_ = type_
984 self.type_ = type_
981
985
982 def __repr__(self):
986 def __repr__(self):
983 return '<Optional:%s>' % self.type_.__repr__()
987 return '<Optional:%s>' % self.type_.__repr__()
984
988
985 def __call__(self):
989 def __call__(self):
986 return self.getval()
990 return self.getval()
987
991
988 def getval(self):
992 def getval(self):
989 """
993 """
990 returns value from this Optional instance
994 returns value from this Optional instance
991 """
995 """
992 if isinstance(self.type_, OAttr):
996 if isinstance(self.type_, OAttr):
993 # use params name
997 # use params name
994 return self.type_.attr_name
998 return self.type_.attr_name
995 return self.type_
999 return self.type_
996
1000
997 @classmethod
1001 @classmethod
998 def extract(cls, val):
1002 def extract(cls, val):
999 """
1003 """
1000 Extracts value from Optional() instance
1004 Extracts value from Optional() instance
1001
1005
1002 :param val:
1006 :param val:
1003 :return: original value if it's not Optional instance else
1007 :return: original value if it's not Optional instance else
1004 value of instance
1008 value of instance
1005 """
1009 """
1006 if isinstance(val, cls):
1010 if isinstance(val, cls):
1007 return val.getval()
1011 return val.getval()
1008 return val
1012 return val
1009
1013
1010
1014
1011 def glob2re(pat):
1015 def glob2re(pat):
1012 """
1016 """
1013 Translate a shell PATTERN to a regular expression.
1017 Translate a shell PATTERN to a regular expression.
1014
1018
1015 There is no way to quote meta-characters.
1019 There is no way to quote meta-characters.
1016 """
1020 """
1017
1021
1018 i, n = 0, len(pat)
1022 i, n = 0, len(pat)
1019 res = ''
1023 res = ''
1020 while i < n:
1024 while i < n:
1021 c = pat[i]
1025 c = pat[i]
1022 i = i+1
1026 i = i+1
1023 if c == '*':
1027 if c == '*':
1024 #res = res + '.*'
1028 #res = res + '.*'
1025 res = res + '[^/]*'
1029 res = res + '[^/]*'
1026 elif c == '?':
1030 elif c == '?':
1027 #res = res + '.'
1031 #res = res + '.'
1028 res = res + '[^/]'
1032 res = res + '[^/]'
1029 elif c == '[':
1033 elif c == '[':
1030 j = i
1034 j = i
1031 if j < n and pat[j] == '!':
1035 if j < n and pat[j] == '!':
1032 j = j+1
1036 j = j+1
1033 if j < n and pat[j] == ']':
1037 if j < n and pat[j] == ']':
1034 j = j+1
1038 j = j+1
1035 while j < n and pat[j] != ']':
1039 while j < n and pat[j] != ']':
1036 j = j+1
1040 j = j+1
1037 if j >= n:
1041 if j >= n:
1038 res = res + '\\['
1042 res = res + '\\['
1039 else:
1043 else:
1040 stuff = pat[i:j].replace('\\','\\\\')
1044 stuff = pat[i:j].replace('\\','\\\\')
1041 i = j+1
1045 i = j+1
1042 if stuff[0] == '!':
1046 if stuff[0] == '!':
1043 stuff = '^' + stuff[1:]
1047 stuff = '^' + stuff[1:]
1044 elif stuff[0] == '^':
1048 elif stuff[0] == '^':
1045 stuff = '\\' + stuff
1049 stuff = '\\' + stuff
1046 res = '%s[%s]' % (res, stuff)
1050 res = '%s[%s]' % (res, stuff)
1047 else:
1051 else:
1048 res = res + re.escape(c)
1052 res = res + re.escape(c)
1049 return res + '\Z(?ms)'
1053 return res + '\Z(?ms)'
1050
1054
1051
1055
1052 def parse_byte_string(size_str):
1056 def parse_byte_string(size_str):
1053 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
1057 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
1054 if not match:
1058 if not match:
1055 raise ValueError('Given size:%s is invalid, please make sure '
1059 raise ValueError('Given size:%s is invalid, please make sure '
1056 'to use format of <num>(MB|KB)' % size_str)
1060 'to use format of <num>(MB|KB)' % size_str)
1057
1061
1058 _parts = match.groups()
1062 _parts = match.groups()
1059 num, type_ = _parts
1063 num, type_ = _parts
1060 return long(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
1064 return long(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
1061
1065
1062
1066
1063 class CachedProperty(object):
1067 class CachedProperty(object):
1064 """
1068 """
1065 Lazy Attributes. With option to invalidate the cache by running a method
1069 Lazy Attributes. With option to invalidate the cache by running a method
1066
1070
1067 class Foo():
1071 class Foo():
1068
1072
1069 @CachedProperty
1073 @CachedProperty
1070 def heavy_func():
1074 def heavy_func():
1071 return 'super-calculation'
1075 return 'super-calculation'
1072
1076
1073 foo = Foo()
1077 foo = Foo()
1074 foo.heavy_func() # first computions
1078 foo.heavy_func() # first computions
1075 foo.heavy_func() # fetch from cache
1079 foo.heavy_func() # fetch from cache
1076 foo._invalidate_prop_cache('heavy_func')
1080 foo._invalidate_prop_cache('heavy_func')
1077 # at this point calling foo.heavy_func() will be re-computed
1081 # at this point calling foo.heavy_func() will be re-computed
1078 """
1082 """
1079
1083
1080 def __init__(self, func, func_name=None):
1084 def __init__(self, func, func_name=None):
1081
1085
1082 if func_name is None:
1086 if func_name is None:
1083 func_name = func.__name__
1087 func_name = func.__name__
1084 self.data = (func, func_name)
1088 self.data = (func, func_name)
1085 update_wrapper(self, func)
1089 update_wrapper(self, func)
1086
1090
1087 def __get__(self, inst, class_):
1091 def __get__(self, inst, class_):
1088 if inst is None:
1092 if inst is None:
1089 return self
1093 return self
1090
1094
1091 func, func_name = self.data
1095 func, func_name = self.data
1092 value = func(inst)
1096 value = func(inst)
1093 inst.__dict__[func_name] = value
1097 inst.__dict__[func_name] = value
1094 if '_invalidate_prop_cache' not in inst.__dict__:
1098 if '_invalidate_prop_cache' not in inst.__dict__:
1095 inst.__dict__['_invalidate_prop_cache'] = partial(
1099 inst.__dict__['_invalidate_prop_cache'] = partial(
1096 self._invalidate_prop_cache, inst)
1100 self._invalidate_prop_cache, inst)
1097 return value
1101 return value
1098
1102
1099 def _invalidate_prop_cache(self, inst, name):
1103 def _invalidate_prop_cache(self, inst, name):
1100 inst.__dict__.pop(name, None)
1104 inst.__dict__.pop(name, None)
@@ -1,1899 +1,1901 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24 import os
24 import os
25 import re
25 import re
26 import time
26 import time
27 import shutil
27 import shutil
28 import datetime
28 import datetime
29 import fnmatch
29 import fnmatch
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import collections
32 import collections
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from pyramid import compat
37 from pyramid import compat
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.translation import lazy_ugettext
40 from rhodecode.translation import lazy_ugettext
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 from rhodecode.lib.vcs import connection
42 from rhodecode.lib.vcs import connection
43 from rhodecode.lib.vcs.utils import author_name, author_email
43 from rhodecode.lib.vcs.utils import author_name, author_email
44 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.conf import settings
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 RepositoryError)
50 RepositoryError)
51
51
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 FILEMODE_DEFAULT = 0o100644
56 FILEMODE_DEFAULT = 0o100644
57 FILEMODE_EXECUTABLE = 0o100755
57 FILEMODE_EXECUTABLE = 0o100755
58 EMPTY_COMMIT_ID = '0' * 40
58 EMPTY_COMMIT_ID = '0' * 40
59
59
60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61
61
62
62
63 class MergeFailureReason(object):
63 class MergeFailureReason(object):
64 """
64 """
65 Enumeration with all the reasons why the server side merge could fail.
65 Enumeration with all the reasons why the server side merge could fail.
66
66
67 DO NOT change the number of the reasons, as they may be stored in the
67 DO NOT change the number of the reasons, as they may be stored in the
68 database.
68 database.
69
69
70 Changing the name of a reason is acceptable and encouraged to deprecate old
70 Changing the name of a reason is acceptable and encouraged to deprecate old
71 reasons.
71 reasons.
72 """
72 """
73
73
74 # Everything went well.
74 # Everything went well.
75 NONE = 0
75 NONE = 0
76
76
77 # An unexpected exception was raised. Check the logs for more details.
77 # An unexpected exception was raised. Check the logs for more details.
78 UNKNOWN = 1
78 UNKNOWN = 1
79
79
80 # The merge was not successful, there are conflicts.
80 # The merge was not successful, there are conflicts.
81 MERGE_FAILED = 2
81 MERGE_FAILED = 2
82
82
83 # The merge succeeded but we could not push it to the target repository.
83 # The merge succeeded but we could not push it to the target repository.
84 PUSH_FAILED = 3
84 PUSH_FAILED = 3
85
85
86 # The specified target is not a head in the target repository.
86 # The specified target is not a head in the target repository.
87 TARGET_IS_NOT_HEAD = 4
87 TARGET_IS_NOT_HEAD = 4
88
88
89 # The source repository contains more branches than the target. Pushing
89 # The source repository contains more branches than the target. Pushing
90 # the merge will create additional branches in the target.
90 # the merge will create additional branches in the target.
91 HG_SOURCE_HAS_MORE_BRANCHES = 5
91 HG_SOURCE_HAS_MORE_BRANCHES = 5
92
92
93 # The target reference has multiple heads. That does not allow to correctly
93 # The target reference has multiple heads. That does not allow to correctly
94 # identify the target location. This could only happen for mercurial
94 # identify the target location. This could only happen for mercurial
95 # branches.
95 # branches.
96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
97
97
98 # The target repository is locked
98 # The target repository is locked
99 TARGET_IS_LOCKED = 7
99 TARGET_IS_LOCKED = 7
100
100
101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
102 # A involved commit could not be found.
102 # A involved commit could not be found.
103 _DEPRECATED_MISSING_COMMIT = 8
103 _DEPRECATED_MISSING_COMMIT = 8
104
104
105 # The target repo reference is missing.
105 # The target repo reference is missing.
106 MISSING_TARGET_REF = 9
106 MISSING_TARGET_REF = 9
107
107
108 # The source repo reference is missing.
108 # The source repo reference is missing.
109 MISSING_SOURCE_REF = 10
109 MISSING_SOURCE_REF = 10
110
110
111 # The merge was not successful, there are conflicts related to sub
111 # The merge was not successful, there are conflicts related to sub
112 # repositories.
112 # repositories.
113 SUBREPO_MERGE_FAILED = 11
113 SUBREPO_MERGE_FAILED = 11
114
114
115
115
116 class UpdateFailureReason(object):
116 class UpdateFailureReason(object):
117 """
117 """
118 Enumeration with all the reasons why the pull request update could fail.
118 Enumeration with all the reasons why the pull request update could fail.
119
119
120 DO NOT change the number of the reasons, as they may be stored in the
120 DO NOT change the number of the reasons, as they may be stored in the
121 database.
121 database.
122
122
123 Changing the name of a reason is acceptable and encouraged to deprecate old
123 Changing the name of a reason is acceptable and encouraged to deprecate old
124 reasons.
124 reasons.
125 """
125 """
126
126
127 # Everything went well.
127 # Everything went well.
128 NONE = 0
128 NONE = 0
129
129
130 # An unexpected exception was raised. Check the logs for more details.
130 # An unexpected exception was raised. Check the logs for more details.
131 UNKNOWN = 1
131 UNKNOWN = 1
132
132
133 # The pull request is up to date.
133 # The pull request is up to date.
134 NO_CHANGE = 2
134 NO_CHANGE = 2
135
135
136 # The pull request has a reference type that is not supported for update.
136 # The pull request has a reference type that is not supported for update.
137 WRONG_REF_TYPE = 3
137 WRONG_REF_TYPE = 3
138
138
139 # Update failed because the target reference is missing.
139 # Update failed because the target reference is missing.
140 MISSING_TARGET_REF = 4
140 MISSING_TARGET_REF = 4
141
141
142 # Update failed because the source reference is missing.
142 # Update failed because the source reference is missing.
143 MISSING_SOURCE_REF = 5
143 MISSING_SOURCE_REF = 5
144
144
145
145
146 class MergeResponse(object):
146 class MergeResponse(object):
147
147
148 # uses .format(**metadata) for variables
148 # uses .format(**metadata) for variables
149 MERGE_STATUS_MESSAGES = {
149 MERGE_STATUS_MESSAGES = {
150 MergeFailureReason.NONE: lazy_ugettext(
150 MergeFailureReason.NONE: lazy_ugettext(
151 u'This pull request can be automatically merged.'),
151 u'This pull request can be automatically merged.'),
152 MergeFailureReason.UNKNOWN: lazy_ugettext(
152 MergeFailureReason.UNKNOWN: lazy_ugettext(
153 u'This pull request cannot be merged because of an unhandled exception. '
153 u'This pull request cannot be merged because of an unhandled exception. '
154 u'{exception}'),
154 u'{exception}'),
155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
156 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
156 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
158 u'This pull request could not be merged because push to '
158 u'This pull request could not be merged because push to '
159 u'target:`{target}@{merge_commit}` failed.'),
159 u'target:`{target}@{merge_commit}` failed.'),
160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
161 u'This pull request cannot be merged because the target '
161 u'This pull request cannot be merged because the target '
162 u'`{target_ref.name}` is not a head.'),
162 u'`{target_ref.name}` is not a head.'),
163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
164 u'This pull request cannot be merged because the source contains '
164 u'This pull request cannot be merged because the source contains '
165 u'more branches than the target.'),
165 u'more branches than the target.'),
166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
167 u'This pull request cannot be merged because the target `{target_ref.name}` '
167 u'This pull request cannot be merged because the target `{target_ref.name}` '
168 u'has multiple heads: `{heads}`.'),
168 u'has multiple heads: `{heads}`.'),
169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
170 u'This pull request cannot be merged because the target repository is '
170 u'This pull request cannot be merged because the target repository is '
171 u'locked by {locked_by}.'),
171 u'locked by {locked_by}.'),
172
172
173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
174 u'This pull request cannot be merged because the target '
174 u'This pull request cannot be merged because the target '
175 u'reference `{target_ref.name}` is missing.'),
175 u'reference `{target_ref.name}` is missing.'),
176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
177 u'This pull request cannot be merged because the source '
177 u'This pull request cannot be merged because the source '
178 u'reference `{source_ref.name}` is missing.'),
178 u'reference `{source_ref.name}` is missing.'),
179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
180 u'This pull request cannot be merged because of conflicts related '
180 u'This pull request cannot be merged because of conflicts related '
181 u'to sub repositories.'),
181 u'to sub repositories.'),
182
182
183 # Deprecations
183 # Deprecations
184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
185 u'This pull request cannot be merged because the target or the '
185 u'This pull request cannot be merged because the target or the '
186 u'source reference is missing.'),
186 u'source reference is missing.'),
187
187
188 }
188 }
189
189
190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
191 self.possible = possible
191 self.possible = possible
192 self.executed = executed
192 self.executed = executed
193 self.merge_ref = merge_ref
193 self.merge_ref = merge_ref
194 self.failure_reason = failure_reason
194 self.failure_reason = failure_reason
195 self.metadata = metadata or {}
195 self.metadata = metadata or {}
196
196
197 def __repr__(self):
197 def __repr__(self):
198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
199
199
200 def __eq__(self, other):
200 def __eq__(self, other):
201 same_instance = isinstance(other, self.__class__)
201 same_instance = isinstance(other, self.__class__)
202 return same_instance \
202 return same_instance \
203 and self.possible == other.possible \
203 and self.possible == other.possible \
204 and self.executed == other.executed \
204 and self.executed == other.executed \
205 and self.failure_reason == other.failure_reason
205 and self.failure_reason == other.failure_reason
206
206
207 @property
207 @property
208 def label(self):
208 def label(self):
209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
210 not k.startswith('_'))
210 not k.startswith('_'))
211 return label_dict.get(self.failure_reason)
211 return label_dict.get(self.failure_reason)
212
212
213 @property
213 @property
214 def merge_status_message(self):
214 def merge_status_message(self):
215 """
215 """
216 Return a human friendly error message for the given merge status code.
216 Return a human friendly error message for the given merge status code.
217 """
217 """
218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
219
219 try:
220 try:
220 return msg.format(**self.metadata)
221 return msg.format(**self.metadata)
221 except Exception:
222 except Exception:
222 log.exception('Failed to format %s message', self)
223 log.exception('Failed to format %s message', self)
223 return msg
224 return msg
224
225
225 def asdict(self):
226 def asdict(self):
226 data = {}
227 data = {}
227 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
228 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
228 'merge_status_message']:
229 'merge_status_message']:
229 data[k] = getattr(self, k)
230 data[k] = getattr(self, k)
230 return data
231 return data
231
232
232
233
233 class BaseRepository(object):
234 class BaseRepository(object):
234 """
235 """
235 Base Repository for final backends
236 Base Repository for final backends
236
237
237 .. attribute:: DEFAULT_BRANCH_NAME
238 .. attribute:: DEFAULT_BRANCH_NAME
238
239
239 name of default branch (i.e. "trunk" for svn, "master" for git etc.
240 name of default branch (i.e. "trunk" for svn, "master" for git etc.
240
241
241 .. attribute:: commit_ids
242 .. attribute:: commit_ids
242
243
243 list of all available commit ids, in ascending order
244 list of all available commit ids, in ascending order
244
245
245 .. attribute:: path
246 .. attribute:: path
246
247
247 absolute path to the repository
248 absolute path to the repository
248
249
249 .. attribute:: bookmarks
250 .. attribute:: bookmarks
250
251
251 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
252 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
252 there are no bookmarks or the backend implementation does not support
253 there are no bookmarks or the backend implementation does not support
253 bookmarks.
254 bookmarks.
254
255
255 .. attribute:: tags
256 .. attribute:: tags
256
257
257 Mapping from name to :term:`Commit ID` of the tag.
258 Mapping from name to :term:`Commit ID` of the tag.
258
259
259 """
260 """
260
261
261 DEFAULT_BRANCH_NAME = None
262 DEFAULT_BRANCH_NAME = None
262 DEFAULT_CONTACT = u"Unknown"
263 DEFAULT_CONTACT = u"Unknown"
263 DEFAULT_DESCRIPTION = u"unknown"
264 DEFAULT_DESCRIPTION = u"unknown"
264 EMPTY_COMMIT_ID = '0' * 40
265 EMPTY_COMMIT_ID = '0' * 40
265
266
266 path = None
267 path = None
267
268
268 _is_empty = None
269 _is_empty = None
269 _commit_ids = {}
270 _commit_ids = {}
270
271
271 def __init__(self, repo_path, config=None, create=False, **kwargs):
272 def __init__(self, repo_path, config=None, create=False, **kwargs):
272 """
273 """
273 Initializes repository. Raises RepositoryError if repository could
274 Initializes repository. Raises RepositoryError if repository could
274 not be find at the given ``repo_path`` or directory at ``repo_path``
275 not be find at the given ``repo_path`` or directory at ``repo_path``
275 exists and ``create`` is set to True.
276 exists and ``create`` is set to True.
276
277
277 :param repo_path: local path of the repository
278 :param repo_path: local path of the repository
278 :param config: repository configuration
279 :param config: repository configuration
279 :param create=False: if set to True, would try to create repository.
280 :param create=False: if set to True, would try to create repository.
280 :param src_url=None: if set, should be proper url from which repository
281 :param src_url=None: if set, should be proper url from which repository
281 would be cloned; requires ``create`` parameter to be set to True -
282 would be cloned; requires ``create`` parameter to be set to True -
282 raises RepositoryError if src_url is set and create evaluates to
283 raises RepositoryError if src_url is set and create evaluates to
283 False
284 False
284 """
285 """
285 raise NotImplementedError
286 raise NotImplementedError
286
287
287 def __repr__(self):
288 def __repr__(self):
288 return '<%s at %s>' % (self.__class__.__name__, self.path)
289 return '<%s at %s>' % (self.__class__.__name__, self.path)
289
290
290 def __len__(self):
291 def __len__(self):
291 return self.count()
292 return self.count()
292
293
293 def __eq__(self, other):
294 def __eq__(self, other):
294 same_instance = isinstance(other, self.__class__)
295 same_instance = isinstance(other, self.__class__)
295 return same_instance and other.path == self.path
296 return same_instance and other.path == self.path
296
297
297 def __ne__(self, other):
298 def __ne__(self, other):
298 return not self.__eq__(other)
299 return not self.__eq__(other)
299
300
300 def get_create_shadow_cache_pr_path(self, db_repo):
301 def get_create_shadow_cache_pr_path(self, db_repo):
301 path = db_repo.cached_diffs_dir
302 path = db_repo.cached_diffs_dir
302 if not os.path.exists(path):
303 if not os.path.exists(path):
303 os.makedirs(path, 0o755)
304 os.makedirs(path, 0o755)
304 return path
305 return path
305
306
306 @classmethod
307 @classmethod
307 def get_default_config(cls, default=None):
308 def get_default_config(cls, default=None):
308 config = Config()
309 config = Config()
309 if default and isinstance(default, list):
310 if default and isinstance(default, list):
310 for section, key, val in default:
311 for section, key, val in default:
311 config.set(section, key, val)
312 config.set(section, key, val)
312 return config
313 return config
313
314
314 @LazyProperty
315 @LazyProperty
315 def _remote(self):
316 def _remote(self):
316 raise NotImplementedError
317 raise NotImplementedError
317
318
318 def _heads(self, branch=None):
319 def _heads(self, branch=None):
319 return []
320 return []
320
321
321 @LazyProperty
322 @LazyProperty
322 def EMPTY_COMMIT(self):
323 def EMPTY_COMMIT(self):
323 return EmptyCommit(self.EMPTY_COMMIT_ID)
324 return EmptyCommit(self.EMPTY_COMMIT_ID)
324
325
325 @LazyProperty
326 @LazyProperty
326 def alias(self):
327 def alias(self):
327 for k, v in settings.BACKENDS.items():
328 for k, v in settings.BACKENDS.items():
328 if v.split('.')[-1] == str(self.__class__.__name__):
329 if v.split('.')[-1] == str(self.__class__.__name__):
329 return k
330 return k
330
331
331 @LazyProperty
332 @LazyProperty
332 def name(self):
333 def name(self):
333 return safe_unicode(os.path.basename(self.path))
334 return safe_unicode(os.path.basename(self.path))
334
335
335 @LazyProperty
336 @LazyProperty
336 def description(self):
337 def description(self):
337 raise NotImplementedError
338 raise NotImplementedError
338
339
339 def refs(self):
340 def refs(self):
340 """
341 """
341 returns a `dict` with branches, bookmarks, tags, and closed_branches
342 returns a `dict` with branches, bookmarks, tags, and closed_branches
342 for this repository
343 for this repository
343 """
344 """
344 return dict(
345 return dict(
345 branches=self.branches,
346 branches=self.branches,
346 branches_closed=self.branches_closed,
347 branches_closed=self.branches_closed,
347 tags=self.tags,
348 tags=self.tags,
348 bookmarks=self.bookmarks
349 bookmarks=self.bookmarks
349 )
350 )
350
351
    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError
357
358
    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        (Only meaningful for backends that support closing branches,
        e.g. Mercurial; others may return an empty dict.)
        """
        raise NotImplementedError
364
365
    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        (Only meaningful for backends with bookmarks, e.g. Mercurial.)
        """
        raise NotImplementedError
371
372
    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tag names to commit ids.
        """
        raise NotImplementedError
378
379
379 @LazyProperty
380 @LazyProperty
380 def size(self):
381 def size(self):
381 """
382 """
382 Returns combined size in bytes for all repository files
383 Returns combined size in bytes for all repository files
383 """
384 """
384 tip = self.get_commit()
385 tip = self.get_commit()
385 return tip.size
386 return tip.size
386
387
387 def size_at_commit(self, commit_id):
388 def size_at_commit(self, commit_id):
388 commit = self.get_commit(commit_id)
389 commit = self.get_commit(commit_id)
389 return commit.size
390 return commit.size
390
391
391 def _check_for_empty(self):
392 def _check_for_empty(self):
392 no_commits = len(self._commit_ids) == 0
393 no_commits = len(self._commit_ids) == 0
393 if no_commits:
394 if no_commits:
394 # check on remote to be sure
395 # check on remote to be sure
395 return self._remote.is_empty()
396 return self._remote.is_empty()
396 else:
397 else:
397 return False
398 return False
398
399
    def is_empty(self):
        """
        Return True when the repository has no commits.

        Under tests the answer is recomputed every call; in production the
        first (possibly remote) check is cached on the instance.
        """
        if rhodecode.is_test:
            return self._check_for_empty()

        if self._is_empty is None:
            # cache empty for production, but not tests
            self._is_empty = self._check_for_empty()

        return self._is_empty
408
409
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Backend-specific; raises on invalid/unreachable urls.
        """
        raise NotImplementedError
416
417
    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend.
        """
        raise NotImplementedError
423
424
424 # ==========================================================================
425 # ==========================================================================
425 # COMMITS
426 # COMMITS
426 # ==========================================================================
427 # ==========================================================================
427
428
    @CachedProperty
    def commit_ids(self):
        """Ordered sequence of all commit ids in the repository; cached."""
        raise NotImplementedError
431
432
432 def append_commit_id(self, commit_id):
433 def append_commit_id(self, commit_id):
433 if commit_id not in self.commit_ids:
434 if commit_id not in self.commit_ids:
434 self._rebuild_cache(self.commit_ids + [commit_id])
435 self._rebuild_cache(self.commit_ids + [commit_id])
435
436
436 # clear cache
437 # clear cache
437 self._invalidate_prop_cache('commit_ids')
438 self._invalidate_prop_cache('commit_ids')
438 self._is_empty = False
439 self._is_empty = False
439
440
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param commit_id: commit id to look up
        :param commit_idx: numeric index into the commit list
        :param pre_load: Optional. List of commit attributes to load.
        :param translate_tag: NOTE(review): presumably resolves tag names to
            commit ids in backends — confirm against implementations
        :param maybe_unreachable: NOTE(review): appears to allow lookup of
            commits not reachable from any ref (e.g. after a conflicted
            merge attempt) — confirm against backend implementations

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError
450
452
    def __iter__(self):
        """Iterate over all commits, oldest first, as `BaseCommit` objects."""
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)
454
456
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date: include commits made at or after this datetime
        :param end_date: include commits made at or before this datetime
        :param branch_name: limit results to the given branch
        :param show_hidden: include hidden (e.g. obsolete) commits
        :param pre_load: Optional. List of commit attributes to load.
        :param translate_tags: NOTE(review): presumably resolves tag refs;
            confirm against backend implementations
        """
        raise NotImplementedError
474 raise NotImplementedError
473
475
474 def __getitem__(self, key):
476 def __getitem__(self, key):
475 """
477 """
476 Allows index based access to the commit objects of this repository.
478 Allows index based access to the commit objects of this repository.
477 """
479 """
478 pre_load = ["author", "branch", "date", "message", "parents"]
480 pre_load = ["author", "branch", "date", "message", "parents"]
479 if isinstance(key, slice):
481 if isinstance(key, slice):
480 return self._get_range(key, pre_load)
482 return self._get_range(key, pre_load)
481 return self.get_commit(commit_idx=key, pre_load=pre_load)
483 return self.get_commit(commit_idx=key, pre_load=pre_load)
482
484
483 def _get_range(self, slice_obj, pre_load):
485 def _get_range(self, slice_obj, pre_load):
484 for commit_id in self.commit_ids.__getitem__(slice_obj):
486 for commit_id in self.commit_ids.__getitem__(slice_obj):
485 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
487 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
486
488
    def count(self):
        """Total number of commits in the repository."""
        return len(self.commit_ids)
489
491
    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError
503
505
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        raise NotImplementedError
516
518
    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
          file. If `path1` is also set, this value is only associated to
          `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
          parameter works only for backends which support diff generation for
          different paths. Other backends will raise a `ValueError` if `path1`
          is set and has a different value than `path`.
        """
        raise NotImplementedError
542
544
    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository.
        Backend-specific; not all backends support history rewriting.
        """
        raise NotImplementedError
548
550
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit it from this repository to use as a
           target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError
560
562
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit it from this repository to use as a
           target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
           returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError
578
580
579 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
581 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
580 user_name='', user_email='', message='', dry_run=False,
582 user_name='', user_email='', message='', dry_run=False,
581 use_rebase=False, close_branch=False):
583 use_rebase=False, close_branch=False):
582 """
584 """
583 Merge the revisions specified in `source_ref` from `source_repo`
585 Merge the revisions specified in `source_ref` from `source_repo`
584 onto the `target_ref` of this repository.
586 onto the `target_ref` of this repository.
585
587
586 `source_ref` and `target_ref` are named tupls with the following
588 `source_ref` and `target_ref` are named tupls with the following
587 fields `type`, `name` and `commit_id`.
589 fields `type`, `name` and `commit_id`.
588
590
589 Returns a MergeResponse named tuple with the following fields
591 Returns a MergeResponse named tuple with the following fields
590 'possible', 'executed', 'source_commit', 'target_commit',
592 'possible', 'executed', 'source_commit', 'target_commit',
591 'merge_commit'.
593 'merge_commit'.
592
594
593 :param repo_id: `repo_id` target repo id.
595 :param repo_id: `repo_id` target repo id.
594 :param workspace_id: `workspace_id` unique identifier.
596 :param workspace_id: `workspace_id` unique identifier.
595 :param target_ref: `target_ref` points to the commit on top of which
597 :param target_ref: `target_ref` points to the commit on top of which
596 the `source_ref` should be merged.
598 the `source_ref` should be merged.
597 :param source_repo: The repository that contains the commits to be
599 :param source_repo: The repository that contains the commits to be
598 merged.
600 merged.
599 :param source_ref: `source_ref` points to the topmost commit from
601 :param source_ref: `source_ref` points to the topmost commit from
600 the `source_repo` which should be merged.
602 the `source_repo` which should be merged.
601 :param user_name: Merge commit `user_name`.
603 :param user_name: Merge commit `user_name`.
602 :param user_email: Merge commit `user_email`.
604 :param user_email: Merge commit `user_email`.
603 :param message: Merge commit `message`.
605 :param message: Merge commit `message`.
604 :param dry_run: If `True` the merge will not take place.
606 :param dry_run: If `True` the merge will not take place.
605 :param use_rebase: If `True` commits from the source will be rebased
607 :param use_rebase: If `True` commits from the source will be rebased
606 on top of the target instead of being merged.
608 on top of the target instead of being merged.
607 :param close_branch: If `True` branch will be close before merging it
609 :param close_branch: If `True` branch will be close before merging it
608 """
610 """
609 if dry_run:
611 if dry_run:
610 message = message or settings.MERGE_DRY_RUN_MESSAGE
612 message = message or settings.MERGE_DRY_RUN_MESSAGE
611 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
613 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
612 user_name = user_name or settings.MERGE_DRY_RUN_USER
614 user_name = user_name or settings.MERGE_DRY_RUN_USER
613 else:
615 else:
614 if not user_name:
616 if not user_name:
615 raise ValueError('user_name cannot be empty')
617 raise ValueError('user_name cannot be empty')
616 if not user_email:
618 if not user_email:
617 raise ValueError('user_email cannot be empty')
619 raise ValueError('user_email cannot be empty')
618 if not message:
620 if not message:
619 raise ValueError('message cannot be empty')
621 raise ValueError('message cannot be empty')
620
622
621 try:
623 try:
622 return self._merge_repo(
624 return self._merge_repo(
623 repo_id, workspace_id, target_ref, source_repo,
625 repo_id, workspace_id, target_ref, source_repo,
624 source_ref, message, user_name, user_email, dry_run=dry_run,
626 source_ref, message, user_name, user_email, dry_run=dry_run,
625 use_rebase=use_rebase, close_branch=close_branch)
627 use_rebase=use_rebase, close_branch=close_branch)
626 except RepositoryError as exc:
628 except RepositoryError as exc:
627 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
629 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
628 return MergeResponse(
630 return MergeResponse(
629 False, False, None, MergeFailureReason.UNKNOWN,
631 False, False, None, MergeFailureReason.UNKNOWN,
630 metadata={'exception': str(exc)})
632 metadata={'exception': str(exc)})
631
633
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """Internal implementation of merge; provided by each backend."""
        raise NotImplementedError
638
640
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Create the merge workspace (shadow repository) if needed.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError
647
649
648 @classmethod
650 @classmethod
649 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
651 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
650 """
652 """
651 Legacy version that was used before. We still need it for
653 Legacy version that was used before. We still need it for
652 backward compat
654 backward compat
653 """
655 """
654 return os.path.join(
656 return os.path.join(
655 os.path.dirname(repo_path),
657 os.path.dirname(repo_path),
656 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
658 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
657
659
658 @classmethod
660 @classmethod
659 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
661 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
660 # The name of the shadow repository must start with '.', so it is
662 # The name of the shadow repository must start with '.', so it is
661 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
663 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
662 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
664 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
663 if os.path.exists(legacy_repository_path):
665 if os.path.exists(legacy_repository_path):
664 return legacy_repository_path
666 return legacy_repository_path
665 else:
667 else:
666 return os.path.join(
668 return os.path.join(
667 os.path.dirname(repo_path),
669 os.path.dirname(repo_path),
668 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
670 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
669
671
    def cleanup_merge_workspace(self, repo_id, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param workspace_id: `workspace_id` unique identifier.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        # timestamped rename target, so the delete never races a new workspace
        shadow_repository_path_del = '{}.{}.delete'.format(
            shadow_repository_path, time.time())

        # move the shadow repo, so it never conflicts with the one used.
        # we use this method because shutil.rmtree had some edge case problems
        # removing symlinked repositories
        if not os.path.isdir(shadow_repository_path):
            return

        shutil.move(shadow_repository_path, shadow_repository_path_del)
        try:
            shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
        except Exception:
            # best effort: log and retry ignoring errors so cleanup never raises
            log.exception('Failed to gracefully remove shadow repo under %s',
                          shadow_repository_path_del)
            shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
697
699
698 # ========== #
700 # ========== #
699 # COMMIT API #
701 # COMMIT API #
700 # ========== #
702 # ========== #
701
703
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.
        """
        raise NotImplementedError
708
710
709 # ======================== #
711 # ======================== #
710 # UTILITIES FOR SUBCLASSES #
712 # UTILITIES FOR SUBCLASSES #
711 # ======================== #
713 # ======================== #
712
714
713 def _validate_diff_commits(self, commit1, commit2):
715 def _validate_diff_commits(self, commit1, commit2):
714 """
716 """
715 Validates that the given commits are related to this repository.
717 Validates that the given commits are related to this repository.
716
718
717 Intended as a utility for sub classes to have a consistent validation
719 Intended as a utility for sub classes to have a consistent validation
718 of input parameters in methods like :meth:`get_diff`.
720 of input parameters in methods like :meth:`get_diff`.
719 """
721 """
720 self._validate_commit(commit1)
722 self._validate_commit(commit1)
721 self._validate_commit(commit2)
723 self._validate_commit(commit2)
722 if (isinstance(commit1, EmptyCommit) and
724 if (isinstance(commit1, EmptyCommit) and
723 isinstance(commit2, EmptyCommit)):
725 isinstance(commit2, EmptyCommit)):
724 raise ValueError("Cannot compare two empty commits")
726 raise ValueError("Cannot compare two empty commits")
725
727
    def _validate_commit(self, commit):
        """
        Ensure *commit* is a `BaseCommit` belonging to this repository
        (an `EmptyCommit` is accepted regardless of its repository).
        """
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "related to this repository instead %s." %
                (commit, self, commit.repository))
735
737
    def _validate_commit_id(self, commit_id):
        """Raise TypeError unless *commit_id* is a string."""
        if not isinstance(commit_id, compat.string_types):
            raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
739
741
    def _validate_commit_idx(self, commit_idx):
        # `long` is Python 2 only; this module targets py2 (see compat usage)
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")
743
745
744 def _validate_branch_name(self, branch_name):
746 def _validate_branch_name(self, branch_name):
745 if branch_name and branch_name not in self.branches_all:
747 if branch_name and branch_name not in self.branches_all:
746 msg = ("Branch %s not found in %s" % (branch_name, self))
748 msg = ("Branch %s not found in %s" % (branch_name, self))
747 raise BranchDoesNotExistError(msg)
749 raise BranchDoesNotExistError(msg)
748
750
749 #
751 #
750 # Supporting deprecated API parts
752 # Supporting deprecated API parts
751 # TODO: johbo: consider to move this into a mixin
753 # TODO: johbo: consider to move this into a mixin
752 #
754 #
753
755
    @property
    def EMPTY_CHANGESET(self):
        """Deprecated alias for :attr:`EMPTY_COMMIT_ID`."""
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID
759
761
    @property
    def revisions(self):
        """Deprecated alias for :attr:`commit_ids`."""
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids
764
766
    @revisions.setter
    def revisions(self, value):
        """Deprecated setter forwarding to :attr:`commit_ids`."""
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value
769
771
770 def get_changeset(self, revision=None, pre_load=None):
772 def get_changeset(self, revision=None, pre_load=None):
771 warnings.warn("Use get_commit instead", DeprecationWarning)
773 warnings.warn("Use get_commit instead", DeprecationWarning)
772 commit_id = None
774 commit_id = None
773 commit_idx = None
775 commit_idx = None
774 if isinstance(revision, compat.string_types):
776 if isinstance(revision, compat.string_types):
775 commit_id = revision
777 commit_id = revision
776 else:
778 else:
777 commit_idx = revision
779 commit_idx = revision
778 return self.get_commit(
780 return self.get_commit(
779 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
781 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
780
782
    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        """Deprecated alias for :meth:`get_commits`; accepts ids or indices."""
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)
790
792
791 def _revision_to_commit(self, revision):
793 def _revision_to_commit(self, revision):
792 """
794 """
793 Translates a revision to a commit_id
795 Translates a revision to a commit_id
794
796
795 Helps to support the old changeset based API which allows to use
797 Helps to support the old changeset based API which allows to use
796 commit ids and commit indices interchangeable.
798 commit ids and commit indices interchangeable.
797 """
799 """
798 if revision is None:
800 if revision is None:
799 return revision
801 return revision
800
802
801 if isinstance(revision, compat.string_types):
803 if isinstance(revision, compat.string_types):
802 commit_id = revision
804 commit_id = revision
803 else:
805 else:
804 commit_id = self.commit_ids[revision]
806 commit_id = self.commit_ids[revision]
805 return commit_id
807 return commit_id
806
808
807 @property
809 @property
808 def in_memory_changeset(self):
810 def in_memory_changeset(self):
809 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
811 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
810 return self.in_memory_commit
812 return self.in_memory_commit
811
813
812 def get_path_permissions(self, username):
814 def get_path_permissions(self, username):
813 """
815 """
814 Returns a path permission checker or None if not supported
816 Returns a path permission checker or None if not supported
815
817
816 :param username: session user name
818 :param username: session user name
817 :return: an instance of BasePathPermissionChecker or None
819 :return: an instance of BasePathPermissionChecker or None
818 """
820 """
819 return None
821 return None
820
822
821 def install_hooks(self, force=False):
823 def install_hooks(self, force=False):
822 return self._remote.install_hooks(force)
824 return self._remote.install_hooks(force)
823
825
824 def get_hooks_info(self):
826 def get_hooks_info(self):
825 return self._remote.get_hooks_info()
827 return self._remote.get_hooks_info()
826
828
827
829
828 class BaseCommit(object):
830 class BaseCommit(object):
829 """
831 """
830 Each backend should implement it's commit representation.
832 Each backend should implement it's commit representation.
831
833
832 **Attributes**
834 **Attributes**
833
835
834 ``repository``
836 ``repository``
835 repository object within which commit exists
837 repository object within which commit exists
836
838
837 ``id``
839 ``id``
838 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
840 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
839 just ``tip``.
841 just ``tip``.
840
842
841 ``raw_id``
843 ``raw_id``
842 raw commit representation (i.e. full 40 length sha for git
844 raw commit representation (i.e. full 40 length sha for git
843 backend)
845 backend)
844
846
845 ``short_id``
847 ``short_id``
846 shortened (if apply) version of ``raw_id``; it would be simple
848 shortened (if apply) version of ``raw_id``; it would be simple
847 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
849 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
848 as ``raw_id`` for subversion
850 as ``raw_id`` for subversion
849
851
850 ``idx``
852 ``idx``
851 commit index
853 commit index
852
854
853 ``files``
855 ``files``
854 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
856 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
855
857
856 ``dirs``
858 ``dirs``
857 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
859 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
858
860
859 ``nodes``
861 ``nodes``
860 combined list of ``Node`` objects
862 combined list of ``Node`` objects
861
863
862 ``author``
864 ``author``
863 author of the commit, as unicode
865 author of the commit, as unicode
864
866
865 ``message``
867 ``message``
866 message of the commit, as unicode
868 message of the commit, as unicode
867
869
868 ``parents``
870 ``parents``
869 list of parent commits
871 list of parent commits
870
872
871 """
873 """
872
874
873 branch = None
875 branch = None
874 """
876 """
875 Depending on the backend this should be set to the branch name of the
877 Depending on the backend this should be set to the branch name of the
876 commit. Backends not supporting branches on commits should leave this
878 commit. Backends not supporting branches on commits should leave this
877 value as ``None``.
879 value as ``None``.
878 """
880 """
879
881
880 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
882 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
881 """
883 """
882 This template is used to generate a default prefix for repository archives
884 This template is used to generate a default prefix for repository archives
883 if no prefix has been specified.
885 if no prefix has been specified.
884 """
886 """
885
887
886 def __str__(self):
888 def __str__(self):
887 return '<%s at %s:%s>' % (
889 return '<%s at %s:%s>' % (
888 self.__class__.__name__, self.idx, self.short_id)
890 self.__class__.__name__, self.idx, self.short_id)
889
891
890 def __repr__(self):
892 def __repr__(self):
891 return self.__str__()
893 return self.__str__()
892
894
893 def __unicode__(self):
895 def __unicode__(self):
894 return u'%s:%s' % (self.idx, self.short_id)
896 return u'%s:%s' % (self.idx, self.short_id)
895
897
896 def __eq__(self, other):
898 def __eq__(self, other):
897 same_instance = isinstance(other, self.__class__)
899 same_instance = isinstance(other, self.__class__)
898 return same_instance and self.raw_id == other.raw_id
900 return same_instance and self.raw_id == other.raw_id
899
901
900 def __json__(self):
902 def __json__(self):
901 parents = []
903 parents = []
902 try:
904 try:
903 for parent in self.parents:
905 for parent in self.parents:
904 parents.append({'raw_id': parent.raw_id})
906 parents.append({'raw_id': parent.raw_id})
905 except NotImplementedError:
907 except NotImplementedError:
906 # empty commit doesn't have parents implemented
908 # empty commit doesn't have parents implemented
907 pass
909 pass
908
910
909 return {
911 return {
910 'short_id': self.short_id,
912 'short_id': self.short_id,
911 'raw_id': self.raw_id,
913 'raw_id': self.raw_id,
912 'revision': self.idx,
914 'revision': self.idx,
913 'message': self.message,
915 'message': self.message,
914 'date': self.date,
916 'date': self.date,
915 'author': self.author,
917 'author': self.author,
916 'parents': parents,
918 'parents': parents,
917 'branch': self.branch
919 'branch': self.branch
918 }
920 }
919
921
920 def __getstate__(self):
922 def __getstate__(self):
921 d = self.__dict__.copy()
923 d = self.__dict__.copy()
922 d.pop('_remote', None)
924 d.pop('_remote', None)
923 d.pop('repository', None)
925 d.pop('repository', None)
924 return d
926 return d
925
927
926 def _get_refs(self):
928 def _get_refs(self):
927 return {
929 return {
928 'branches': [self.branch] if self.branch else [],
930 'branches': [self.branch] if self.branch else [],
929 'bookmarks': getattr(self, 'bookmarks', []),
931 'bookmarks': getattr(self, 'bookmarks', []),
930 'tags': self.tags
932 'tags': self.tags
931 }
933 }
932
934
933 @LazyProperty
935 @LazyProperty
934 def last(self):
936 def last(self):
935 """
937 """
936 ``True`` if this is last commit in repository, ``False``
938 ``True`` if this is last commit in repository, ``False``
937 otherwise; trying to access this attribute while there is no
939 otherwise; trying to access this attribute while there is no
938 commits would raise `EmptyRepositoryError`
940 commits would raise `EmptyRepositoryError`
939 """
941 """
940 if self.repository is None:
942 if self.repository is None:
941 raise CommitError("Cannot check if it's most recent commit")
943 raise CommitError("Cannot check if it's most recent commit")
942 return self.raw_id == self.repository.commit_ids[-1]
944 return self.raw_id == self.repository.commit_ids[-1]
943
945
944 @LazyProperty
946 @LazyProperty
945 def parents(self):
947 def parents(self):
946 """
948 """
947 Returns list of parent commits.
949 Returns list of parent commits.
948 """
950 """
949 raise NotImplementedError
951 raise NotImplementedError
950
952
951 @LazyProperty
953 @LazyProperty
952 def first_parent(self):
954 def first_parent(self):
953 """
955 """
954 Returns list of parent commits.
956 Returns list of parent commits.
955 """
957 """
956 return self.parents[0] if self.parents else EmptyCommit()
958 return self.parents[0] if self.parents else EmptyCommit()
957
959
958 @property
960 @property
959 def merge(self):
961 def merge(self):
960 """
962 """
961 Returns boolean if commit is a merge.
963 Returns boolean if commit is a merge.
962 """
964 """
963 return len(self.parents) > 1
965 return len(self.parents) > 1
964
966
965 @LazyProperty
967 @LazyProperty
966 def children(self):
968 def children(self):
967 """
969 """
968 Returns list of child commits.
970 Returns list of child commits.
969 """
971 """
970 raise NotImplementedError
972 raise NotImplementedError
971
973
972 @LazyProperty
974 @LazyProperty
973 def id(self):
975 def id(self):
974 """
976 """
975 Returns string identifying this commit.
977 Returns string identifying this commit.
976 """
978 """
977 raise NotImplementedError
979 raise NotImplementedError
978
980
979 @LazyProperty
981 @LazyProperty
980 def raw_id(self):
982 def raw_id(self):
981 """
983 """
982 Returns raw string identifying this commit.
984 Returns raw string identifying this commit.
983 """
985 """
984 raise NotImplementedError
986 raise NotImplementedError
985
987
986 @LazyProperty
988 @LazyProperty
987 def short_id(self):
989 def short_id(self):
988 """
990 """
989 Returns shortened version of ``raw_id`` attribute, as string,
991 Returns shortened version of ``raw_id`` attribute, as string,
990 identifying this commit, useful for presentation to users.
992 identifying this commit, useful for presentation to users.
991 """
993 """
992 raise NotImplementedError
994 raise NotImplementedError
993
995
994 @LazyProperty
996 @LazyProperty
995 def idx(self):
997 def idx(self):
996 """
998 """
997 Returns integer identifying this commit.
999 Returns integer identifying this commit.
998 """
1000 """
999 raise NotImplementedError
1001 raise NotImplementedError
1000
1002
1001 @LazyProperty
1003 @LazyProperty
1002 def committer(self):
1004 def committer(self):
1003 """
1005 """
1004 Returns committer for this commit
1006 Returns committer for this commit
1005 """
1007 """
1006 raise NotImplementedError
1008 raise NotImplementedError
1007
1009
1008 @LazyProperty
1010 @LazyProperty
1009 def committer_name(self):
1011 def committer_name(self):
1010 """
1012 """
1011 Returns committer name for this commit
1013 Returns committer name for this commit
1012 """
1014 """
1013
1015
1014 return author_name(self.committer)
1016 return author_name(self.committer)
1015
1017
1016 @LazyProperty
1018 @LazyProperty
1017 def committer_email(self):
1019 def committer_email(self):
1018 """
1020 """
1019 Returns committer email address for this commit
1021 Returns committer email address for this commit
1020 """
1022 """
1021
1023
1022 return author_email(self.committer)
1024 return author_email(self.committer)
1023
1025
1024 @LazyProperty
1026 @LazyProperty
1025 def author(self):
1027 def author(self):
1026 """
1028 """
1027 Returns author for this commit
1029 Returns author for this commit
1028 """
1030 """
1029
1031
1030 raise NotImplementedError
1032 raise NotImplementedError
1031
1033
1032 @LazyProperty
1034 @LazyProperty
1033 def author_name(self):
1035 def author_name(self):
1034 """
1036 """
1035 Returns author name for this commit
1037 Returns author name for this commit
1036 """
1038 """
1037
1039
1038 return author_name(self.author)
1040 return author_name(self.author)
1039
1041
1040 @LazyProperty
1042 @LazyProperty
1041 def author_email(self):
1043 def author_email(self):
1042 """
1044 """
1043 Returns author email address for this commit
1045 Returns author email address for this commit
1044 """
1046 """
1045
1047
1046 return author_email(self.author)
1048 return author_email(self.author)
1047
1049
1048 def get_file_mode(self, path):
1050 def get_file_mode(self, path):
1049 """
1051 """
1050 Returns stat mode of the file at `path`.
1052 Returns stat mode of the file at `path`.
1051 """
1053 """
1052 raise NotImplementedError
1054 raise NotImplementedError
1053
1055
1054 def is_link(self, path):
1056 def is_link(self, path):
1055 """
1057 """
1056 Returns ``True`` if given `path` is a symlink
1058 Returns ``True`` if given `path` is a symlink
1057 """
1059 """
1058 raise NotImplementedError
1060 raise NotImplementedError
1059
1061
1060 def is_node_binary(self, path):
1062 def is_node_binary(self, path):
1061 """
1063 """
1062 Returns ``True`` is given path is a binary file
1064 Returns ``True`` is given path is a binary file
1063 """
1065 """
1064 raise NotImplementedError
1066 raise NotImplementedError
1065
1067
1066 def get_file_content(self, path):
1068 def get_file_content(self, path):
1067 """
1069 """
1068 Returns content of the file at the given `path`.
1070 Returns content of the file at the given `path`.
1069 """
1071 """
1070 raise NotImplementedError
1072 raise NotImplementedError
1071
1073
1072 def get_file_content_streamed(self, path):
1074 def get_file_content_streamed(self, path):
1073 """
1075 """
1074 returns a streaming response from vcsserver with file content
1076 returns a streaming response from vcsserver with file content
1075 """
1077 """
1076 raise NotImplementedError
1078 raise NotImplementedError
1077
1079
1078 def get_file_size(self, path):
1080 def get_file_size(self, path):
1079 """
1081 """
1080 Returns size of the file at the given `path`.
1082 Returns size of the file at the given `path`.
1081 """
1083 """
1082 raise NotImplementedError
1084 raise NotImplementedError
1083
1085
1084 def get_path_commit(self, path, pre_load=None):
1086 def get_path_commit(self, path, pre_load=None):
1085 """
1087 """
1086 Returns last commit of the file at the given `path`.
1088 Returns last commit of the file at the given `path`.
1087
1089
1088 :param pre_load: Optional. List of commit attributes to load.
1090 :param pre_load: Optional. List of commit attributes to load.
1089 """
1091 """
1090 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1092 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1091 if not commits:
1093 if not commits:
1092 raise RepositoryError(
1094 raise RepositoryError(
1093 'Failed to fetch history for path {}. '
1095 'Failed to fetch history for path {}. '
1094 'Please check if such path exists in your repository'.format(
1096 'Please check if such path exists in your repository'.format(
1095 path))
1097 path))
1096 return commits[0]
1098 return commits[0]
1097
1099
1098 def get_path_history(self, path, limit=None, pre_load=None):
1100 def get_path_history(self, path, limit=None, pre_load=None):
1099 """
1101 """
1100 Returns history of file as reversed list of :class:`BaseCommit`
1102 Returns history of file as reversed list of :class:`BaseCommit`
1101 objects for which file at given `path` has been modified.
1103 objects for which file at given `path` has been modified.
1102
1104
1103 :param limit: Optional. Allows to limit the size of the returned
1105 :param limit: Optional. Allows to limit the size of the returned
1104 history. This is intended as a hint to the underlying backend, so
1106 history. This is intended as a hint to the underlying backend, so
1105 that it can apply optimizations depending on the limit.
1107 that it can apply optimizations depending on the limit.
1106 :param pre_load: Optional. List of commit attributes to load.
1108 :param pre_load: Optional. List of commit attributes to load.
1107 """
1109 """
1108 raise NotImplementedError
1110 raise NotImplementedError
1109
1111
1110 def get_file_annotate(self, path, pre_load=None):
1112 def get_file_annotate(self, path, pre_load=None):
1111 """
1113 """
1112 Returns a generator of four element tuples with
1114 Returns a generator of four element tuples with
1113 lineno, sha, commit lazy loader and line
1115 lineno, sha, commit lazy loader and line
1114
1116
1115 :param pre_load: Optional. List of commit attributes to load.
1117 :param pre_load: Optional. List of commit attributes to load.
1116 """
1118 """
1117 raise NotImplementedError
1119 raise NotImplementedError
1118
1120
1119 def get_nodes(self, path):
1121 def get_nodes(self, path):
1120 """
1122 """
1121 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1123 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1122 state of commit at the given ``path``.
1124 state of commit at the given ``path``.
1123
1125
1124 :raises ``CommitError``: if node at the given ``path`` is not
1126 :raises ``CommitError``: if node at the given ``path`` is not
1125 instance of ``DirNode``
1127 instance of ``DirNode``
1126 """
1128 """
1127 raise NotImplementedError
1129 raise NotImplementedError
1128
1130
1129 def get_node(self, path):
1131 def get_node(self, path):
1130 """
1132 """
1131 Returns ``Node`` object from the given ``path``.
1133 Returns ``Node`` object from the given ``path``.
1132
1134
1133 :raises ``NodeDoesNotExistError``: if there is no node at the given
1135 :raises ``NodeDoesNotExistError``: if there is no node at the given
1134 ``path``
1136 ``path``
1135 """
1137 """
1136 raise NotImplementedError
1138 raise NotImplementedError
1137
1139
1138 def get_largefile_node(self, path):
1140 def get_largefile_node(self, path):
1139 """
1141 """
1140 Returns the path to largefile from Mercurial/Git-lfs storage.
1142 Returns the path to largefile from Mercurial/Git-lfs storage.
1141 or None if it's not a largefile node
1143 or None if it's not a largefile node
1142 """
1144 """
1143 return None
1145 return None
1144
1146
1145 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1147 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1146 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1148 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1147 """
1149 """
1148 Creates an archive containing the contents of the repository.
1150 Creates an archive containing the contents of the repository.
1149
1151
1150 :param archive_dest_path: path to the file which to create the archive.
1152 :param archive_dest_path: path to the file which to create the archive.
1151 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1153 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1152 :param prefix: name of root directory in archive.
1154 :param prefix: name of root directory in archive.
1153 Default is repository name and commit's short_id joined with dash:
1155 Default is repository name and commit's short_id joined with dash:
1154 ``"{repo_name}-{short_id}"``.
1156 ``"{repo_name}-{short_id}"``.
1155 :param write_metadata: write a metadata file into archive.
1157 :param write_metadata: write a metadata file into archive.
1156 :param mtime: custom modification time for archive creation, defaults
1158 :param mtime: custom modification time for archive creation, defaults
1157 to time.time() if not given.
1159 to time.time() if not given.
1158 :param archive_at_path: pack files at this path (default '/')
1160 :param archive_at_path: pack files at this path (default '/')
1159
1161
1160 :raise VCSError: If prefix has a problem.
1162 :raise VCSError: If prefix has a problem.
1161 """
1163 """
1162 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1164 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1163 if kind not in allowed_kinds:
1165 if kind not in allowed_kinds:
1164 raise ImproperArchiveTypeError(
1166 raise ImproperArchiveTypeError(
1165 'Archive kind (%s) not supported use one of %s' %
1167 'Archive kind (%s) not supported use one of %s' %
1166 (kind, allowed_kinds))
1168 (kind, allowed_kinds))
1167
1169
1168 prefix = self._validate_archive_prefix(prefix)
1170 prefix = self._validate_archive_prefix(prefix)
1169
1171
1170 mtime = mtime is not None or time.mktime(self.date.timetuple())
1172 mtime = mtime is not None or time.mktime(self.date.timetuple())
1171
1173
1172 file_info = []
1174 file_info = []
1173 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1175 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1174 for _r, _d, files in cur_rev.walk(archive_at_path):
1176 for _r, _d, files in cur_rev.walk(archive_at_path):
1175 for f in files:
1177 for f in files:
1176 f_path = os.path.join(prefix, f.path)
1178 f_path = os.path.join(prefix, f.path)
1177 file_info.append(
1179 file_info.append(
1178 (f_path, f.mode, f.is_link(), f.raw_bytes))
1180 (f_path, f.mode, f.is_link(), f.raw_bytes))
1179
1181
1180 if write_metadata:
1182 if write_metadata:
1181 metadata = [
1183 metadata = [
1182 ('repo_name', self.repository.name),
1184 ('repo_name', self.repository.name),
1183 ('commit_id', self.raw_id),
1185 ('commit_id', self.raw_id),
1184 ('mtime', mtime),
1186 ('mtime', mtime),
1185 ('branch', self.branch),
1187 ('branch', self.branch),
1186 ('tags', ','.join(self.tags)),
1188 ('tags', ','.join(self.tags)),
1187 ]
1189 ]
1188 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1190 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1189 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1191 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1190
1192
1191 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1193 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1192
1194
1193 def _validate_archive_prefix(self, prefix):
1195 def _validate_archive_prefix(self, prefix):
1194 if prefix is None:
1196 if prefix is None:
1195 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1197 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1196 repo_name=safe_str(self.repository.name),
1198 repo_name=safe_str(self.repository.name),
1197 short_id=self.short_id)
1199 short_id=self.short_id)
1198 elif not isinstance(prefix, str):
1200 elif not isinstance(prefix, str):
1199 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1201 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1200 elif prefix.startswith('/'):
1202 elif prefix.startswith('/'):
1201 raise VCSError("Prefix cannot start with leading slash")
1203 raise VCSError("Prefix cannot start with leading slash")
1202 elif prefix.strip() == '':
1204 elif prefix.strip() == '':
1203 raise VCSError("Prefix cannot be empty")
1205 raise VCSError("Prefix cannot be empty")
1204 return prefix
1206 return prefix
1205
1207
1206 @LazyProperty
1208 @LazyProperty
1207 def root(self):
1209 def root(self):
1208 """
1210 """
1209 Returns ``RootNode`` object for this commit.
1211 Returns ``RootNode`` object for this commit.
1210 """
1212 """
1211 return self.get_node('')
1213 return self.get_node('')
1212
1214
1213 def next(self, branch=None):
1215 def next(self, branch=None):
1214 """
1216 """
1215 Returns next commit from current, if branch is gives it will return
1217 Returns next commit from current, if branch is gives it will return
1216 next commit belonging to this branch
1218 next commit belonging to this branch
1217
1219
1218 :param branch: show commits within the given named branch
1220 :param branch: show commits within the given named branch
1219 """
1221 """
1220 indexes = xrange(self.idx + 1, self.repository.count())
1222 indexes = xrange(self.idx + 1, self.repository.count())
1221 return self._find_next(indexes, branch)
1223 return self._find_next(indexes, branch)
1222
1224
1223 def prev(self, branch=None):
1225 def prev(self, branch=None):
1224 """
1226 """
1225 Returns previous commit from current, if branch is gives it will
1227 Returns previous commit from current, if branch is gives it will
1226 return previous commit belonging to this branch
1228 return previous commit belonging to this branch
1227
1229
1228 :param branch: show commit within the given named branch
1230 :param branch: show commit within the given named branch
1229 """
1231 """
1230 indexes = xrange(self.idx - 1, -1, -1)
1232 indexes = xrange(self.idx - 1, -1, -1)
1231 return self._find_next(indexes, branch)
1233 return self._find_next(indexes, branch)
1232
1234
1233 def _find_next(self, indexes, branch=None):
1235 def _find_next(self, indexes, branch=None):
1234 if branch and self.branch != branch:
1236 if branch and self.branch != branch:
1235 raise VCSError('Branch option used on commit not belonging '
1237 raise VCSError('Branch option used on commit not belonging '
1236 'to that branch')
1238 'to that branch')
1237
1239
1238 for next_idx in indexes:
1240 for next_idx in indexes:
1239 commit = self.repository.get_commit(commit_idx=next_idx)
1241 commit = self.repository.get_commit(commit_idx=next_idx)
1240 if branch and branch != commit.branch:
1242 if branch and branch != commit.branch:
1241 continue
1243 continue
1242 return commit
1244 return commit
1243 raise CommitDoesNotExistError
1245 raise CommitDoesNotExistError
1244
1246
1245 def diff(self, ignore_whitespace=True, context=3):
1247 def diff(self, ignore_whitespace=True, context=3):
1246 """
1248 """
1247 Returns a `Diff` object representing the change made by this commit.
1249 Returns a `Diff` object representing the change made by this commit.
1248 """
1250 """
1249 parent = self.first_parent
1251 parent = self.first_parent
1250 diff = self.repository.get_diff(
1252 diff = self.repository.get_diff(
1251 parent, self,
1253 parent, self,
1252 ignore_whitespace=ignore_whitespace,
1254 ignore_whitespace=ignore_whitespace,
1253 context=context)
1255 context=context)
1254 return diff
1256 return diff
1255
1257
1256 @LazyProperty
1258 @LazyProperty
1257 def added(self):
1259 def added(self):
1258 """
1260 """
1259 Returns list of added ``FileNode`` objects.
1261 Returns list of added ``FileNode`` objects.
1260 """
1262 """
1261 raise NotImplementedError
1263 raise NotImplementedError
1262
1264
1263 @LazyProperty
1265 @LazyProperty
1264 def changed(self):
1266 def changed(self):
1265 """
1267 """
1266 Returns list of modified ``FileNode`` objects.
1268 Returns list of modified ``FileNode`` objects.
1267 """
1269 """
1268 raise NotImplementedError
1270 raise NotImplementedError
1269
1271
1270 @LazyProperty
1272 @LazyProperty
1271 def removed(self):
1273 def removed(self):
1272 """
1274 """
1273 Returns list of removed ``FileNode`` objects.
1275 Returns list of removed ``FileNode`` objects.
1274 """
1276 """
1275 raise NotImplementedError
1277 raise NotImplementedError
1276
1278
1277 @LazyProperty
1279 @LazyProperty
1278 def size(self):
1280 def size(self):
1279 """
1281 """
1280 Returns total number of bytes from contents of all filenodes.
1282 Returns total number of bytes from contents of all filenodes.
1281 """
1283 """
1282 return sum((node.size for node in self.get_filenodes_generator()))
1284 return sum((node.size for node in self.get_filenodes_generator()))
1283
1285
1284 def walk(self, topurl=''):
1286 def walk(self, topurl=''):
1285 """
1287 """
1286 Similar to os.walk method. Insted of filesystem it walks through
1288 Similar to os.walk method. Insted of filesystem it walks through
1287 commit starting at given ``topurl``. Returns generator of tuples
1289 commit starting at given ``topurl``. Returns generator of tuples
1288 (topnode, dirnodes, filenodes).
1290 (topnode, dirnodes, filenodes).
1289 """
1291 """
1290 topnode = self.get_node(topurl)
1292 topnode = self.get_node(topurl)
1291 if not topnode.is_dir():
1293 if not topnode.is_dir():
1292 return
1294 return
1293 yield (topnode, topnode.dirs, topnode.files)
1295 yield (topnode, topnode.dirs, topnode.files)
1294 for dirnode in topnode.dirs:
1296 for dirnode in topnode.dirs:
1295 for tup in self.walk(dirnode.path):
1297 for tup in self.walk(dirnode.path):
1296 yield tup
1298 yield tup
1297
1299
1298 def get_filenodes_generator(self):
1300 def get_filenodes_generator(self):
1299 """
1301 """
1300 Returns generator that yields *all* file nodes.
1302 Returns generator that yields *all* file nodes.
1301 """
1303 """
1302 for topnode, dirs, files in self.walk():
1304 for topnode, dirs, files in self.walk():
1303 for node in files:
1305 for node in files:
1304 yield node
1306 yield node
1305
1307
1306 #
1308 #
1307 # Utilities for sub classes to support consistent behavior
1309 # Utilities for sub classes to support consistent behavior
1308 #
1310 #
1309
1311
1310 def no_node_at_path(self, path):
1312 def no_node_at_path(self, path):
1311 return NodeDoesNotExistError(
1313 return NodeDoesNotExistError(
1312 u"There is no file nor directory at the given path: "
1314 u"There is no file nor directory at the given path: "
1313 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1315 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1314
1316
1315 def _fix_path(self, path):
1317 def _fix_path(self, path):
1316 """
1318 """
1317 Paths are stored without trailing slash so we need to get rid off it if
1319 Paths are stored without trailing slash so we need to get rid off it if
1318 needed.
1320 needed.
1319 """
1321 """
1320 return path.rstrip('/')
1322 return path.rstrip('/')
1321
1323
1322 #
1324 #
1323 # Deprecated API based on changesets
1325 # Deprecated API based on changesets
1324 #
1326 #
1325
1327
1326 @property
1328 @property
1327 def revision(self):
1329 def revision(self):
1328 warnings.warn("Use idx instead", DeprecationWarning)
1330 warnings.warn("Use idx instead", DeprecationWarning)
1329 return self.idx
1331 return self.idx
1330
1332
1331 @revision.setter
1333 @revision.setter
1332 def revision(self, value):
1334 def revision(self, value):
1333 warnings.warn("Use idx instead", DeprecationWarning)
1335 warnings.warn("Use idx instead", DeprecationWarning)
1334 self.idx = value
1336 self.idx = value
1335
1337
1336 def get_file_changeset(self, path):
1338 def get_file_changeset(self, path):
1337 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1339 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1338 return self.get_path_commit(path)
1340 return self.get_path_commit(path)
1339
1341
1340
1342
1341 class BaseChangesetClass(type):
1343 class BaseChangesetClass(type):
1342
1344
1343 def __instancecheck__(self, instance):
1345 def __instancecheck__(self, instance):
1344 return isinstance(instance, BaseCommit)
1346 return isinstance(instance, BaseCommit)
1345
1347
1346
1348
1347 class BaseChangeset(BaseCommit):
1349 class BaseChangeset(BaseCommit):
1348
1350
1349 __metaclass__ = BaseChangesetClass
1351 __metaclass__ = BaseChangesetClass
1350
1352
1351 def __new__(cls, *args, **kwargs):
1353 def __new__(cls, *args, **kwargs):
1352 warnings.warn(
1354 warnings.warn(
1353 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1355 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1354 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1356 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1355
1357
1356
1358
1357 class BaseInMemoryCommit(object):
1359 class BaseInMemoryCommit(object):
1358 """
1360 """
1359 Represents differences between repository's state (most recent head) and
1361 Represents differences between repository's state (most recent head) and
1360 changes made *in place*.
1362 changes made *in place*.
1361
1363
1362 **Attributes**
1364 **Attributes**
1363
1365
1364 ``repository``
1366 ``repository``
1365 repository object for this in-memory-commit
1367 repository object for this in-memory-commit
1366
1368
1367 ``added``
1369 ``added``
1368 list of ``FileNode`` objects marked as *added*
1370 list of ``FileNode`` objects marked as *added*
1369
1371
1370 ``changed``
1372 ``changed``
1371 list of ``FileNode`` objects marked as *changed*
1373 list of ``FileNode`` objects marked as *changed*
1372
1374
1373 ``removed``
1375 ``removed``
1374 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1376 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1375 *removed*
1377 *removed*
1376
1378
1377 ``parents``
1379 ``parents``
1378 list of :class:`BaseCommit` instances representing parents of
1380 list of :class:`BaseCommit` instances representing parents of
1379 in-memory commit. Should always be 2-element sequence.
1381 in-memory commit. Should always be 2-element sequence.
1380
1382
1381 """
1383 """
1382
1384
1383 def __init__(self, repository):
1385 def __init__(self, repository):
1384 self.repository = repository
1386 self.repository = repository
1385 self.added = []
1387 self.added = []
1386 self.changed = []
1388 self.changed = []
1387 self.removed = []
1389 self.removed = []
1388 self.parents = []
1390 self.parents = []
1389
1391
1390 def add(self, *filenodes):
1392 def add(self, *filenodes):
1391 """
1393 """
1392 Marks given ``FileNode`` objects as *to be committed*.
1394 Marks given ``FileNode`` objects as *to be committed*.
1393
1395
1394 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1396 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1395 latest commit
1397 latest commit
1396 :raises ``NodeAlreadyAddedError``: if node with same path is already
1398 :raises ``NodeAlreadyAddedError``: if node with same path is already
1397 marked as *added*
1399 marked as *added*
1398 """
1400 """
1399 # Check if not already marked as *added* first
1401 # Check if not already marked as *added* first
1400 for node in filenodes:
1402 for node in filenodes:
1401 if node.path in (n.path for n in self.added):
1403 if node.path in (n.path for n in self.added):
1402 raise NodeAlreadyAddedError(
1404 raise NodeAlreadyAddedError(
1403 "Such FileNode %s is already marked for addition"
1405 "Such FileNode %s is already marked for addition"
1404 % node.path)
1406 % node.path)
1405 for node in filenodes:
1407 for node in filenodes:
1406 self.added.append(node)
1408 self.added.append(node)
1407
1409
1408 def change(self, *filenodes):
1410 def change(self, *filenodes):
1409 """
1411 """
1410 Marks given ``FileNode`` objects to be *changed* in next commit.
1412 Marks given ``FileNode`` objects to be *changed* in next commit.
1411
1413
1412 :raises ``EmptyRepositoryError``: if there are no commits yet
1414 :raises ``EmptyRepositoryError``: if there are no commits yet
1413 :raises ``NodeAlreadyExistsError``: if node with same path is already
1415 :raises ``NodeAlreadyExistsError``: if node with same path is already
1414 marked to be *changed*
1416 marked to be *changed*
1415 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1417 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1416 marked to be *removed*
1418 marked to be *removed*
1417 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1419 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1418 commit
1420 commit
1419 :raises ``NodeNotChangedError``: if node hasn't really be changed
1421 :raises ``NodeNotChangedError``: if node hasn't really be changed
1420 """
1422 """
1421 for node in filenodes:
1423 for node in filenodes:
1422 if node.path in (n.path for n in self.removed):
1424 if node.path in (n.path for n in self.removed):
1423 raise NodeAlreadyRemovedError(
1425 raise NodeAlreadyRemovedError(
1424 "Node at %s is already marked as removed" % node.path)
1426 "Node at %s is already marked as removed" % node.path)
1425 try:
1427 try:
1426 self.repository.get_commit()
1428 self.repository.get_commit()
1427 except EmptyRepositoryError:
1429 except EmptyRepositoryError:
1428 raise EmptyRepositoryError(
1430 raise EmptyRepositoryError(
1429 "Nothing to change - try to *add* new nodes rather than "
1431 "Nothing to change - try to *add* new nodes rather than "
1430 "changing them")
1432 "changing them")
1431 for node in filenodes:
1433 for node in filenodes:
1432 if node.path in (n.path for n in self.changed):
1434 if node.path in (n.path for n in self.changed):
1433 raise NodeAlreadyChangedError(
1435 raise NodeAlreadyChangedError(
1434 "Node at '%s' is already marked as changed" % node.path)
1436 "Node at '%s' is already marked as changed" % node.path)
1435 self.changed.append(node)
1437 self.changed.append(node)
1436
1438
1437 def remove(self, *filenodes):
1439 def remove(self, *filenodes):
1438 """
1440 """
1439 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1441 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1440 *removed* in next commit.
1442 *removed* in next commit.
1441
1443
1442 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1444 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1443 be *removed*
1445 be *removed*
1444 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1446 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1445 be *changed*
1447 be *changed*
1446 """
1448 """
1447 for node in filenodes:
1449 for node in filenodes:
1448 if node.path in (n.path for n in self.removed):
1450 if node.path in (n.path for n in self.removed):
1449 raise NodeAlreadyRemovedError(
1451 raise NodeAlreadyRemovedError(
1450 "Node is already marked to for removal at %s" % node.path)
1452 "Node is already marked to for removal at %s" % node.path)
1451 if node.path in (n.path for n in self.changed):
1453 if node.path in (n.path for n in self.changed):
1452 raise NodeAlreadyChangedError(
1454 raise NodeAlreadyChangedError(
1453 "Node is already marked to be changed at %s" % node.path)
1455 "Node is already marked to be changed at %s" % node.path)
1454 # We only mark node as *removed* - real removal is done by
1456 # We only mark node as *removed* - real removal is done by
1455 # commit method
1457 # commit method
1456 self.removed.append(node)
1458 self.removed.append(node)
1457
1459
1458 def reset(self):
1460 def reset(self):
1459 """
1461 """
1460 Resets this instance to initial state (cleans ``added``, ``changed``
1462 Resets this instance to initial state (cleans ``added``, ``changed``
1461 and ``removed`` lists).
1463 and ``removed`` lists).
1462 """
1464 """
1463 self.added = []
1465 self.added = []
1464 self.changed = []
1466 self.changed = []
1465 self.removed = []
1467 self.removed = []
1466 self.parents = []
1468 self.parents = []
1467
1469
1468 def get_ipaths(self):
1470 def get_ipaths(self):
1469 """
1471 """
1470 Returns generator of paths from nodes marked as added, changed or
1472 Returns generator of paths from nodes marked as added, changed or
1471 removed.
1473 removed.
1472 """
1474 """
1473 for node in itertools.chain(self.added, self.changed, self.removed):
1475 for node in itertools.chain(self.added, self.changed, self.removed):
1474 yield node.path
1476 yield node.path
1475
1477
1476 def get_paths(self):
1478 def get_paths(self):
1477 """
1479 """
1478 Returns list of paths from nodes marked as added, changed or removed.
1480 Returns list of paths from nodes marked as added, changed or removed.
1479 """
1481 """
1480 return list(self.get_ipaths())
1482 return list(self.get_ipaths())
1481
1483
1482 def check_integrity(self, parents=None):
1484 def check_integrity(self, parents=None):
1483 """
1485 """
1484 Checks in-memory commit's integrity. Also, sets parents if not
1486 Checks in-memory commit's integrity. Also, sets parents if not
1485 already set.
1487 already set.
1486
1488
1487 :raises CommitError: if any error occurs (i.e.
1489 :raises CommitError: if any error occurs (i.e.
1488 ``NodeDoesNotExistError``).
1490 ``NodeDoesNotExistError``).
1489 """
1491 """
1490 if not self.parents:
1492 if not self.parents:
1491 parents = parents or []
1493 parents = parents or []
1492 if len(parents) == 0:
1494 if len(parents) == 0:
1493 try:
1495 try:
1494 parents = [self.repository.get_commit(), None]
1496 parents = [self.repository.get_commit(), None]
1495 except EmptyRepositoryError:
1497 except EmptyRepositoryError:
1496 parents = [None, None]
1498 parents = [None, None]
1497 elif len(parents) == 1:
1499 elif len(parents) == 1:
1498 parents += [None]
1500 parents += [None]
1499 self.parents = parents
1501 self.parents = parents
1500
1502
1501 # Local parents, only if not None
1503 # Local parents, only if not None
1502 parents = [p for p in self.parents if p]
1504 parents = [p for p in self.parents if p]
1503
1505
1504 # Check nodes marked as added
1506 # Check nodes marked as added
1505 for p in parents:
1507 for p in parents:
1506 for node in self.added:
1508 for node in self.added:
1507 try:
1509 try:
1508 p.get_node(node.path)
1510 p.get_node(node.path)
1509 except NodeDoesNotExistError:
1511 except NodeDoesNotExistError:
1510 pass
1512 pass
1511 else:
1513 else:
1512 raise NodeAlreadyExistsError(
1514 raise NodeAlreadyExistsError(
1513 "Node `%s` already exists at %s" % (node.path, p))
1515 "Node `%s` already exists at %s" % (node.path, p))
1514
1516
1515 # Check nodes marked as changed
1517 # Check nodes marked as changed
1516 missing = set(self.changed)
1518 missing = set(self.changed)
1517 not_changed = set(self.changed)
1519 not_changed = set(self.changed)
1518 if self.changed and not parents:
1520 if self.changed and not parents:
1519 raise NodeDoesNotExistError(str(self.changed[0].path))
1521 raise NodeDoesNotExistError(str(self.changed[0].path))
1520 for p in parents:
1522 for p in parents:
1521 for node in self.changed:
1523 for node in self.changed:
1522 try:
1524 try:
1523 old = p.get_node(node.path)
1525 old = p.get_node(node.path)
1524 missing.remove(node)
1526 missing.remove(node)
1525 # if content actually changed, remove node from not_changed
1527 # if content actually changed, remove node from not_changed
1526 if old.content != node.content:
1528 if old.content != node.content:
1527 not_changed.remove(node)
1529 not_changed.remove(node)
1528 except NodeDoesNotExistError:
1530 except NodeDoesNotExistError:
1529 pass
1531 pass
1530 if self.changed and missing:
1532 if self.changed and missing:
1531 raise NodeDoesNotExistError(
1533 raise NodeDoesNotExistError(
1532 "Node `%s` marked as modified but missing in parents: %s"
1534 "Node `%s` marked as modified but missing in parents: %s"
1533 % (node.path, parents))
1535 % (node.path, parents))
1534
1536
1535 if self.changed and not_changed:
1537 if self.changed and not_changed:
1536 raise NodeNotChangedError(
1538 raise NodeNotChangedError(
1537 "Node `%s` wasn't actually changed (parents: %s)"
1539 "Node `%s` wasn't actually changed (parents: %s)"
1538 % (not_changed.pop().path, parents))
1540 % (not_changed.pop().path, parents))
1539
1541
1540 # Check nodes marked as removed
1542 # Check nodes marked as removed
1541 if self.removed and not parents:
1543 if self.removed and not parents:
1542 raise NodeDoesNotExistError(
1544 raise NodeDoesNotExistError(
1543 "Cannot remove node at %s as there "
1545 "Cannot remove node at %s as there "
1544 "were no parents specified" % self.removed[0].path)
1546 "were no parents specified" % self.removed[0].path)
1545 really_removed = set()
1547 really_removed = set()
1546 for p in parents:
1548 for p in parents:
1547 for node in self.removed:
1549 for node in self.removed:
1548 try:
1550 try:
1549 p.get_node(node.path)
1551 p.get_node(node.path)
1550 really_removed.add(node)
1552 really_removed.add(node)
1551 except CommitError:
1553 except CommitError:
1552 pass
1554 pass
1553 not_removed = set(self.removed) - really_removed
1555 not_removed = set(self.removed) - really_removed
1554 if not_removed:
1556 if not_removed:
1555 # TODO: johbo: This code branch does not seem to be covered
1557 # TODO: johbo: This code branch does not seem to be covered
1556 raise NodeDoesNotExistError(
1558 raise NodeDoesNotExistError(
1557 "Cannot remove node at %s from "
1559 "Cannot remove node at %s from "
1558 "following parents: %s" % (not_removed, parents))
1560 "following parents: %s" % (not_removed, parents))
1559
1561
1560 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1562 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1561 """
1563 """
1562 Performs in-memory commit (doesn't check workdir in any way) and
1564 Performs in-memory commit (doesn't check workdir in any way) and
1563 returns newly created :class:`BaseCommit`. Updates repository's
1565 returns newly created :class:`BaseCommit`. Updates repository's
1564 attribute `commits`.
1566 attribute `commits`.
1565
1567
1566 .. note::
1568 .. note::
1567
1569
1568 While overriding this method each backend's should call
1570 While overriding this method each backend's should call
1569 ``self.check_integrity(parents)`` in the first place.
1571 ``self.check_integrity(parents)`` in the first place.
1570
1572
1571 :param message: message of the commit
1573 :param message: message of the commit
1572 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1574 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1573 :param parents: single parent or sequence of parents from which commit
1575 :param parents: single parent or sequence of parents from which commit
1574 would be derived
1576 would be derived
1575 :param date: ``datetime.datetime`` instance. Defaults to
1577 :param date: ``datetime.datetime`` instance. Defaults to
1576 ``datetime.datetime.now()``.
1578 ``datetime.datetime.now()``.
1577 :param branch: branch name, as string. If none given, default backend's
1579 :param branch: branch name, as string. If none given, default backend's
1578 branch would be used.
1580 branch would be used.
1579
1581
1580 :raises ``CommitError``: if any error occurs while committing
1582 :raises ``CommitError``: if any error occurs while committing
1581 """
1583 """
1582 raise NotImplementedError
1584 raise NotImplementedError
1583
1585
1584
1586
1585 class BaseInMemoryChangesetClass(type):
1587 class BaseInMemoryChangesetClass(type):
1586
1588
1587 def __instancecheck__(self, instance):
1589 def __instancecheck__(self, instance):
1588 return isinstance(instance, BaseInMemoryCommit)
1590 return isinstance(instance, BaseInMemoryCommit)
1589
1591
1590
1592
1591 class BaseInMemoryChangeset(BaseInMemoryCommit):
1593 class BaseInMemoryChangeset(BaseInMemoryCommit):
1592
1594
1593 __metaclass__ = BaseInMemoryChangesetClass
1595 __metaclass__ = BaseInMemoryChangesetClass
1594
1596
1595 def __new__(cls, *args, **kwargs):
1597 def __new__(cls, *args, **kwargs):
1596 warnings.warn(
1598 warnings.warn(
1597 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1599 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1598 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1600 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1599
1601
1600
1602
1601 class EmptyCommit(BaseCommit):
1603 class EmptyCommit(BaseCommit):
1602 """
1604 """
1603 An dummy empty commit. It's possible to pass hash when creating
1605 An dummy empty commit. It's possible to pass hash when creating
1604 an EmptyCommit
1606 an EmptyCommit
1605 """
1607 """
1606
1608
1607 def __init__(
1609 def __init__(
1608 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1610 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1609 message='', author='', date=None):
1611 message='', author='', date=None):
1610 self._empty_commit_id = commit_id
1612 self._empty_commit_id = commit_id
1611 # TODO: johbo: Solve idx parameter, default value does not make
1613 # TODO: johbo: Solve idx parameter, default value does not make
1612 # too much sense
1614 # too much sense
1613 self.idx = idx
1615 self.idx = idx
1614 self.message = message
1616 self.message = message
1615 self.author = author
1617 self.author = author
1616 self.date = date or datetime.datetime.fromtimestamp(0)
1618 self.date = date or datetime.datetime.fromtimestamp(0)
1617 self.repository = repo
1619 self.repository = repo
1618 self.alias = alias
1620 self.alias = alias
1619
1621
1620 @LazyProperty
1622 @LazyProperty
1621 def raw_id(self):
1623 def raw_id(self):
1622 """
1624 """
1623 Returns raw string identifying this commit, useful for web
1625 Returns raw string identifying this commit, useful for web
1624 representation.
1626 representation.
1625 """
1627 """
1626
1628
1627 return self._empty_commit_id
1629 return self._empty_commit_id
1628
1630
1629 @LazyProperty
1631 @LazyProperty
1630 def branch(self):
1632 def branch(self):
1631 if self.alias:
1633 if self.alias:
1632 from rhodecode.lib.vcs.backends import get_backend
1634 from rhodecode.lib.vcs.backends import get_backend
1633 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1635 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1634
1636
1635 @LazyProperty
1637 @LazyProperty
1636 def short_id(self):
1638 def short_id(self):
1637 return self.raw_id[:12]
1639 return self.raw_id[:12]
1638
1640
1639 @LazyProperty
1641 @LazyProperty
1640 def id(self):
1642 def id(self):
1641 return self.raw_id
1643 return self.raw_id
1642
1644
1643 def get_path_commit(self, path):
1645 def get_path_commit(self, path):
1644 return self
1646 return self
1645
1647
1646 def get_file_content(self, path):
1648 def get_file_content(self, path):
1647 return u''
1649 return u''
1648
1650
1649 def get_file_content_streamed(self, path):
1651 def get_file_content_streamed(self, path):
1650 yield self.get_file_content()
1652 yield self.get_file_content()
1651
1653
1652 def get_file_size(self, path):
1654 def get_file_size(self, path):
1653 return 0
1655 return 0
1654
1656
1655
1657
1656 class EmptyChangesetClass(type):
1658 class EmptyChangesetClass(type):
1657
1659
1658 def __instancecheck__(self, instance):
1660 def __instancecheck__(self, instance):
1659 return isinstance(instance, EmptyCommit)
1661 return isinstance(instance, EmptyCommit)
1660
1662
1661
1663
1662 class EmptyChangeset(EmptyCommit):
1664 class EmptyChangeset(EmptyCommit):
1663
1665
1664 __metaclass__ = EmptyChangesetClass
1666 __metaclass__ = EmptyChangesetClass
1665
1667
1666 def __new__(cls, *args, **kwargs):
1668 def __new__(cls, *args, **kwargs):
1667 warnings.warn(
1669 warnings.warn(
1668 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1670 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1669 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1671 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1670
1672
1671 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1673 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1672 alias=None, revision=-1, message='', author='', date=None):
1674 alias=None, revision=-1, message='', author='', date=None):
1673 if requested_revision is not None:
1675 if requested_revision is not None:
1674 warnings.warn(
1676 warnings.warn(
1675 "Parameter requested_revision not supported anymore",
1677 "Parameter requested_revision not supported anymore",
1676 DeprecationWarning)
1678 DeprecationWarning)
1677 super(EmptyChangeset, self).__init__(
1679 super(EmptyChangeset, self).__init__(
1678 commit_id=cs, repo=repo, alias=alias, idx=revision,
1680 commit_id=cs, repo=repo, alias=alias, idx=revision,
1679 message=message, author=author, date=date)
1681 message=message, author=author, date=date)
1680
1682
1681 @property
1683 @property
1682 def revision(self):
1684 def revision(self):
1683 warnings.warn("Use idx instead", DeprecationWarning)
1685 warnings.warn("Use idx instead", DeprecationWarning)
1684 return self.idx
1686 return self.idx
1685
1687
1686 @revision.setter
1688 @revision.setter
1687 def revision(self, value):
1689 def revision(self, value):
1688 warnings.warn("Use idx instead", DeprecationWarning)
1690 warnings.warn("Use idx instead", DeprecationWarning)
1689 self.idx = value
1691 self.idx = value
1690
1692
1691
1693
1692 class EmptyRepository(BaseRepository):
1694 class EmptyRepository(BaseRepository):
1693 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1695 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1694 pass
1696 pass
1695
1697
1696 def get_diff(self, *args, **kwargs):
1698 def get_diff(self, *args, **kwargs):
1697 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1699 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1698 return GitDiff('')
1700 return GitDiff('')
1699
1701
1700
1702
1701 class CollectionGenerator(object):
1703 class CollectionGenerator(object):
1702
1704
1703 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1705 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1704 self.repo = repo
1706 self.repo = repo
1705 self.commit_ids = commit_ids
1707 self.commit_ids = commit_ids
1706 # TODO: (oliver) this isn't currently hooked up
1708 # TODO: (oliver) this isn't currently hooked up
1707 self.collection_size = None
1709 self.collection_size = None
1708 self.pre_load = pre_load
1710 self.pre_load = pre_load
1709 self.translate_tag = translate_tag
1711 self.translate_tag = translate_tag
1710
1712
1711 def __len__(self):
1713 def __len__(self):
1712 if self.collection_size is not None:
1714 if self.collection_size is not None:
1713 return self.collection_size
1715 return self.collection_size
1714 return self.commit_ids.__len__()
1716 return self.commit_ids.__len__()
1715
1717
1716 def __iter__(self):
1718 def __iter__(self):
1717 for commit_id in self.commit_ids:
1719 for commit_id in self.commit_ids:
1718 # TODO: johbo: Mercurial passes in commit indices or commit ids
1720 # TODO: johbo: Mercurial passes in commit indices or commit ids
1719 yield self._commit_factory(commit_id)
1721 yield self._commit_factory(commit_id)
1720
1722
1721 def _commit_factory(self, commit_id):
1723 def _commit_factory(self, commit_id):
1722 """
1724 """
1723 Allows backends to override the way commits are generated.
1725 Allows backends to override the way commits are generated.
1724 """
1726 """
1725 return self.repo.get_commit(
1727 return self.repo.get_commit(
1726 commit_id=commit_id, pre_load=self.pre_load,
1728 commit_id=commit_id, pre_load=self.pre_load,
1727 translate_tag=self.translate_tag)
1729 translate_tag=self.translate_tag)
1728
1730
1729 def __getslice__(self, i, j):
1731 def __getslice__(self, i, j):
1730 """
1732 """
1731 Returns an iterator of sliced repository
1733 Returns an iterator of sliced repository
1732 """
1734 """
1733 commit_ids = self.commit_ids[i:j]
1735 commit_ids = self.commit_ids[i:j]
1734 return self.__class__(
1736 return self.__class__(
1735 self.repo, commit_ids, pre_load=self.pre_load,
1737 self.repo, commit_ids, pre_load=self.pre_load,
1736 translate_tag=self.translate_tag)
1738 translate_tag=self.translate_tag)
1737
1739
1738 def __repr__(self):
1740 def __repr__(self):
1739 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1741 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1740
1742
1741
1743
1742 class Config(object):
1744 class Config(object):
1743 """
1745 """
1744 Represents the configuration for a repository.
1746 Represents the configuration for a repository.
1745
1747
1746 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1748 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1747 standard library. It implements only the needed subset.
1749 standard library. It implements only the needed subset.
1748 """
1750 """
1749
1751
1750 def __init__(self):
1752 def __init__(self):
1751 self._values = {}
1753 self._values = {}
1752
1754
1753 def copy(self):
1755 def copy(self):
1754 clone = Config()
1756 clone = Config()
1755 for section, values in self._values.items():
1757 for section, values in self._values.items():
1756 clone._values[section] = values.copy()
1758 clone._values[section] = values.copy()
1757 return clone
1759 return clone
1758
1760
1759 def __repr__(self):
1761 def __repr__(self):
1760 return '<Config(%s sections) at %s>' % (
1762 return '<Config(%s sections) at %s>' % (
1761 len(self._values), hex(id(self)))
1763 len(self._values), hex(id(self)))
1762
1764
1763 def items(self, section):
1765 def items(self, section):
1764 return self._values.get(section, {}).iteritems()
1766 return self._values.get(section, {}).iteritems()
1765
1767
1766 def get(self, section, option):
1768 def get(self, section, option):
1767 return self._values.get(section, {}).get(option)
1769 return self._values.get(section, {}).get(option)
1768
1770
1769 def set(self, section, option, value):
1771 def set(self, section, option, value):
1770 section_values = self._values.setdefault(section, {})
1772 section_values = self._values.setdefault(section, {})
1771 section_values[option] = value
1773 section_values[option] = value
1772
1774
1773 def clear_section(self, section):
1775 def clear_section(self, section):
1774 self._values[section] = {}
1776 self._values[section] = {}
1775
1777
1776 def serialize(self):
1778 def serialize(self):
1777 """
1779 """
1778 Creates a list of three tuples (section, key, value) representing
1780 Creates a list of three tuples (section, key, value) representing
1779 this config object.
1781 this config object.
1780 """
1782 """
1781 items = []
1783 items = []
1782 for section in self._values:
1784 for section in self._values:
1783 for option, value in self._values[section].items():
1785 for option, value in self._values[section].items():
1784 items.append(
1786 items.append(
1785 (safe_str(section), safe_str(option), safe_str(value)))
1787 (safe_str(section), safe_str(option), safe_str(value)))
1786 return items
1788 return items
1787
1789
1788
1790
1789 class Diff(object):
1791 class Diff(object):
1790 """
1792 """
1791 Represents a diff result from a repository backend.
1793 Represents a diff result from a repository backend.
1792
1794
1793 Subclasses have to provide a backend specific value for
1795 Subclasses have to provide a backend specific value for
1794 :attr:`_header_re` and :attr:`_meta_re`.
1796 :attr:`_header_re` and :attr:`_meta_re`.
1795 """
1797 """
1796 _meta_re = None
1798 _meta_re = None
1797 _header_re = None
1799 _header_re = None
1798
1800
1799 def __init__(self, raw_diff):
1801 def __init__(self, raw_diff):
1800 self.raw = raw_diff
1802 self.raw = raw_diff
1801
1803
1802 def chunks(self):
1804 def chunks(self):
1803 """
1805 """
1804 split the diff in chunks of separate --git a/file b/file chunks
1806 split the diff in chunks of separate --git a/file b/file chunks
1805 to make diffs consistent we must prepend with \n, and make sure
1807 to make diffs consistent we must prepend with \n, and make sure
1806 we can detect last chunk as this was also has special rule
1808 we can detect last chunk as this was also has special rule
1807 """
1809 """
1808
1810
1809 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1811 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1810 header = diff_parts[0]
1812 header = diff_parts[0]
1811
1813
1812 if self._meta_re:
1814 if self._meta_re:
1813 match = self._meta_re.match(header)
1815 match = self._meta_re.match(header)
1814
1816
1815 chunks = diff_parts[1:]
1817 chunks = diff_parts[1:]
1816 total_chunks = len(chunks)
1818 total_chunks = len(chunks)
1817
1819
1818 return (
1820 return (
1819 DiffChunk(chunk, self, cur_chunk == total_chunks)
1821 DiffChunk(chunk, self, cur_chunk == total_chunks)
1820 for cur_chunk, chunk in enumerate(chunks, start=1))
1822 for cur_chunk, chunk in enumerate(chunks, start=1))
1821
1823
1822
1824
1823 class DiffChunk(object):
1825 class DiffChunk(object):
1824
1826
1825 def __init__(self, chunk, diff, last_chunk):
1827 def __init__(self, chunk, diff, last_chunk):
1826 self._diff = diff
1828 self._diff = diff
1827
1829
1828 # since we split by \ndiff --git that part is lost from original diff
1830 # since we split by \ndiff --git that part is lost from original diff
1829 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1831 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1830 if not last_chunk:
1832 if not last_chunk:
1831 chunk += '\n'
1833 chunk += '\n'
1832
1834
1833 match = self._diff._header_re.match(chunk)
1835 match = self._diff._header_re.match(chunk)
1834 self.header = match.groupdict()
1836 self.header = match.groupdict()
1835 self.diff = chunk[match.end():]
1837 self.diff = chunk[match.end():]
1836 self.raw = chunk
1838 self.raw = chunk
1837
1839
1838
1840
1839 class BasePathPermissionChecker(object):
1841 class BasePathPermissionChecker(object):
1840
1842
1841 @staticmethod
1843 @staticmethod
1842 def create_from_patterns(includes, excludes):
1844 def create_from_patterns(includes, excludes):
1843 if includes and '*' in includes and not excludes:
1845 if includes and '*' in includes and not excludes:
1844 return AllPathPermissionChecker()
1846 return AllPathPermissionChecker()
1845 elif excludes and '*' in excludes:
1847 elif excludes and '*' in excludes:
1846 return NonePathPermissionChecker()
1848 return NonePathPermissionChecker()
1847 else:
1849 else:
1848 return PatternPathPermissionChecker(includes, excludes)
1850 return PatternPathPermissionChecker(includes, excludes)
1849
1851
1850 @property
1852 @property
1851 def has_full_access(self):
1853 def has_full_access(self):
1852 raise NotImplemented()
1854 raise NotImplemented()
1853
1855
1854 def has_access(self, path):
1856 def has_access(self, path):
1855 raise NotImplemented()
1857 raise NotImplemented()
1856
1858
1857
1859
1858 class AllPathPermissionChecker(BasePathPermissionChecker):
1860 class AllPathPermissionChecker(BasePathPermissionChecker):
1859
1861
1860 @property
1862 @property
1861 def has_full_access(self):
1863 def has_full_access(self):
1862 return True
1864 return True
1863
1865
1864 def has_access(self, path):
1866 def has_access(self, path):
1865 return True
1867 return True
1866
1868
1867
1869
1868 class NonePathPermissionChecker(BasePathPermissionChecker):
1870 class NonePathPermissionChecker(BasePathPermissionChecker):
1869
1871
1870 @property
1872 @property
1871 def has_full_access(self):
1873 def has_full_access(self):
1872 return False
1874 return False
1873
1875
1874 def has_access(self, path):
1876 def has_access(self, path):
1875 return False
1877 return False
1876
1878
1877
1879
1878 class PatternPathPermissionChecker(BasePathPermissionChecker):
1880 class PatternPathPermissionChecker(BasePathPermissionChecker):
1879
1881
1880 def __init__(self, includes, excludes):
1882 def __init__(self, includes, excludes):
1881 self.includes = includes
1883 self.includes = includes
1882 self.excludes = excludes
1884 self.excludes = excludes
1883 self.includes_re = [] if not includes else [
1885 self.includes_re = [] if not includes else [
1884 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1886 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1885 self.excludes_re = [] if not excludes else [
1887 self.excludes_re = [] if not excludes else [
1886 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1888 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1887
1889
1888 @property
1890 @property
1889 def has_full_access(self):
1891 def has_full_access(self):
1890 return '*' in self.includes and not self.excludes
1892 return '*' in self.includes and not self.excludes
1891
1893
1892 def has_access(self, path):
1894 def has_access(self, path):
1893 for regex in self.excludes_re:
1895 for regex in self.excludes_re:
1894 if regex.match(path):
1896 if regex.match(path):
1895 return False
1897 return False
1896 for regex in self.includes_re:
1898 for regex in self.includes_re:
1897 if regex.match(path):
1899 if regex.match(path):
1898 return True
1900 return True
1899 return False
1901 return False
@@ -1,1017 +1,1029 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46
46
47
47
48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
54 """
54 """
55 Git repository backend.
55 Git repository backend.
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'master'
57 DEFAULT_BRANCH_NAME = 'master'
58
58
59 contact = BaseRepository.DEFAULT_CONTACT
59 contact = BaseRepository.DEFAULT_CONTACT
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
63
63
64 self.path = safe_str(os.path.abspath(repo_path))
64 self.path = safe_str(os.path.abspath(repo_path))
65 self.config = config if config else self.get_default_config()
65 self.config = config if config else self.get_default_config()
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67
67
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69
69
70 # caches
70 # caches
71 self._commit_ids = {}
71 self._commit_ids = {}
72
72
73 @LazyProperty
73 @LazyProperty
74 def _remote(self):
74 def _remote(self):
75 repo_id = self.path
75 repo_id = self.path
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77
77
78 @LazyProperty
78 @LazyProperty
79 def bare(self):
79 def bare(self):
80 return self._remote.bare()
80 return self._remote.bare()
81
81
82 @LazyProperty
82 @LazyProperty
83 def head(self):
83 def head(self):
84 return self._remote.head()
84 return self._remote.head()
85
85
86 @CachedProperty
86 @CachedProperty
87 def commit_ids(self):
87 def commit_ids(self):
88 """
88 """
89 Returns list of commit ids, in ascending order. Being lazy
89 Returns list of commit ids, in ascending order. Being lazy
90 attribute allows external tools to inject commit ids from cache.
90 attribute allows external tools to inject commit ids from cache.
91 """
91 """
92 commit_ids = self._get_all_commit_ids()
92 commit_ids = self._get_all_commit_ids()
93 self._rebuild_cache(commit_ids)
93 self._rebuild_cache(commit_ids)
94 return commit_ids
94 return commit_ids
95
95
96 def _rebuild_cache(self, commit_ids):
96 def _rebuild_cache(self, commit_ids):
97 self._commit_ids = dict((commit_id, index)
97 self._commit_ids = dict((commit_id, index)
98 for index, commit_id in enumerate(commit_ids))
98 for index, commit_id in enumerate(commit_ids))
99
99
100 def run_git_command(self, cmd, **opts):
100 def run_git_command(self, cmd, **opts):
101 """
101 """
102 Runs given ``cmd`` as git command and returns tuple
102 Runs given ``cmd`` as git command and returns tuple
103 (stdout, stderr).
103 (stdout, stderr).
104
104
105 :param cmd: git command to be executed
105 :param cmd: git command to be executed
106 :param opts: env options to pass into Subprocess command
106 :param opts: env options to pass into Subprocess command
107 """
107 """
108 if not isinstance(cmd, list):
108 if not isinstance(cmd, list):
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110
110
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 out, err = self._remote.run_git_command(cmd, **opts)
112 out, err = self._remote.run_git_command(cmd, **opts)
113 if err and not skip_stderr_log:
113 if err and not skip_stderr_log:
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 return out, err
115 return out, err
116
116
117 @staticmethod
117 @staticmethod
118 def check_url(url, config):
118 def check_url(url, config):
119 """
119 """
120 Function will check given url and try to verify if it's a valid
120 Function will check given url and try to verify if it's a valid
121 link. Sometimes it may happened that git will issue basic
121 link. Sometimes it may happened that git will issue basic
122 auth request that can cause whole API to hang when used from python
122 auth request that can cause whole API to hang when used from python
123 or other external calls.
123 or other external calls.
124
124
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
126 when the return code is non 200
126 when the return code is non 200
127 """
127 """
128 # check first if it's not an url
128 # check first if it's not an url
129 if os.path.isdir(url) or url.startswith('file:'):
129 if os.path.isdir(url) or url.startswith('file:'):
130 return True
130 return True
131
131
132 if '+' in url.split('://', 1)[0]:
132 if '+' in url.split('://', 1)[0]:
133 url = url.split('+', 1)[1]
133 url = url.split('+', 1)[1]
134
134
135 # Request the _remote to verify the url
135 # Request the _remote to verify the url
136 return connection.Git.check_url(url, config.serialize())
136 return connection.Git.check_url(url, config.serialize())
137
137
138 @staticmethod
138 @staticmethod
139 def is_valid_repository(path):
139 def is_valid_repository(path):
140 if os.path.isdir(os.path.join(path, '.git')):
140 if os.path.isdir(os.path.join(path, '.git')):
141 return True
141 return True
142 # check case of bare repository
142 # check case of bare repository
143 try:
143 try:
144 GitRepository(path)
144 GitRepository(path)
145 return True
145 return True
146 except VCSError:
146 except VCSError:
147 pass
147 pass
148 return False
148 return False
149
149
150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 bare=False):
151 bare=False):
152 if create and os.path.exists(self.path):
152 if create and os.path.exists(self.path):
153 raise RepositoryError(
153 raise RepositoryError(
154 "Cannot create repository at %s, location already exist"
154 "Cannot create repository at %s, location already exist"
155 % self.path)
155 % self.path)
156
156
157 if bare and do_workspace_checkout:
157 if bare and do_workspace_checkout:
158 raise RepositoryError("Cannot update a bare repository")
158 raise RepositoryError("Cannot update a bare repository")
159 try:
159 try:
160
160
161 if src_url:
161 if src_url:
162 # check URL before any actions
162 # check URL before any actions
163 GitRepository.check_url(src_url, self.config)
163 GitRepository.check_url(src_url, self.config)
164
164
165 if create:
165 if create:
166 os.makedirs(self.path, mode=0o755)
166 os.makedirs(self.path, mode=0o755)
167
167
168 if bare:
168 if bare:
169 self._remote.init_bare()
169 self._remote.init_bare()
170 else:
170 else:
171 self._remote.init()
171 self._remote.init()
172
172
173 if src_url and bare:
173 if src_url and bare:
174 # bare repository only allows a fetch and checkout is not allowed
174 # bare repository only allows a fetch and checkout is not allowed
175 self.fetch(src_url, commit_ids=None)
175 self.fetch(src_url, commit_ids=None)
176 elif src_url:
176 elif src_url:
177 self.pull(src_url, commit_ids=None,
177 self.pull(src_url, commit_ids=None,
178 update_after=do_workspace_checkout)
178 update_after=do_workspace_checkout)
179
179
180 else:
180 else:
181 if not self._remote.assert_correct_path():
181 if not self._remote.assert_correct_path():
182 raise RepositoryError(
182 raise RepositoryError(
183 'Path "%s" does not contain a Git repository' %
183 'Path "%s" does not contain a Git repository' %
184 (self.path,))
184 (self.path,))
185
185
186 # TODO: johbo: check if we have to translate the OSError here
186 # TODO: johbo: check if we have to translate the OSError here
187 except OSError as err:
187 except OSError as err:
188 raise RepositoryError(err)
188 raise RepositoryError(err)
189
189
190 def _get_all_commit_ids(self):
190 def _get_all_commit_ids(self):
191 return self._remote.get_all_commit_ids()
191 return self._remote.get_all_commit_ids()
192
192
193 def _get_commit_ids(self, filters=None):
193 def _get_commit_ids(self, filters=None):
194 # we must check if this repo is not empty, since later command
194 # we must check if this repo is not empty, since later command
195 # fails if it is. And it's cheaper to ask than throw the subprocess
195 # fails if it is. And it's cheaper to ask than throw the subprocess
196 # errors
196 # errors
197
197
198 head = self._remote.head(show_exc=False)
198 head = self._remote.head(show_exc=False)
199
199
200 if not head:
200 if not head:
201 return []
201 return []
202
202
203 rev_filter = ['--branches', '--tags']
203 rev_filter = ['--branches', '--tags']
204 extra_filter = []
204 extra_filter = []
205
205
206 if filters:
206 if filters:
207 if filters.get('since'):
207 if filters.get('since'):
208 extra_filter.append('--since=%s' % (filters['since']))
208 extra_filter.append('--since=%s' % (filters['since']))
209 if filters.get('until'):
209 if filters.get('until'):
210 extra_filter.append('--until=%s' % (filters['until']))
210 extra_filter.append('--until=%s' % (filters['until']))
211 if filters.get('branch_name'):
211 if filters.get('branch_name'):
212 rev_filter = []
212 rev_filter = []
213 extra_filter.append(filters['branch_name'])
213 extra_filter.append(filters['branch_name'])
214 rev_filter.extend(extra_filter)
214 rev_filter.extend(extra_filter)
215
215
216 # if filters.get('start') or filters.get('end'):
216 # if filters.get('start') or filters.get('end'):
217 # # skip is offset, max-count is limit
217 # # skip is offset, max-count is limit
218 # if filters.get('start'):
218 # if filters.get('start'):
219 # extra_filter += ' --skip=%s' % filters['start']
219 # extra_filter += ' --skip=%s' % filters['start']
220 # if filters.get('end'):
220 # if filters.get('end'):
221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222
222
223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 try:
224 try:
225 output, __ = self.run_git_command(cmd)
225 output, __ = self.run_git_command(cmd)
226 except RepositoryError:
226 except RepositoryError:
227 # Can be raised for empty repositories
227 # Can be raised for empty repositories
228 return []
228 return []
229 return output.splitlines()
229 return output.splitlines()
230
230
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False):
232 def is_null(value):
232 def is_null(value):
233 return len(value) == commit_id_or_idx.count('0')
233 return len(value) == commit_id_or_idx.count('0')
234
234
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 return self.commit_ids[-1]
236 return self.commit_ids[-1]
237
237 commit_missing_err = "Commit {} does not exist for `{}`".format(
238 commit_missing_err = "Commit {} does not exist for `{}`".format(
238 *map(safe_str, [commit_id_or_idx, self.name]))
239 *map(safe_str, [commit_id_or_idx, self.name]))
239
240
240 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
241 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
241 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
242 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
242 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
243 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
243 try:
244 try:
244 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
245 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
245 except Exception:
246 except Exception:
246 raise CommitDoesNotExistError(commit_missing_err)
247 raise CommitDoesNotExistError(commit_missing_err)
247
248
248 elif is_bstr:
249 elif is_bstr:
249 # Need to call remote to translate id for tagging scenario
250 # Need to call remote to translate id for tagging scenario
250 try:
251 try:
251 remote_data = self._remote.get_object(commit_id_or_idx)
252 remote_data = self._remote.get_object(commit_id_or_idx,
253 maybe_unreachable=maybe_unreachable)
252 commit_id_or_idx = remote_data["commit_id"]
254 commit_id_or_idx = remote_data["commit_id"]
253 except (CommitDoesNotExistError,):
255 except (CommitDoesNotExistError,):
254 raise CommitDoesNotExistError(commit_missing_err)
256 raise CommitDoesNotExistError(commit_missing_err)
255
257
256 # Ensure we return full id
258 # Ensure we return full id
257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
259 if not SHA_PATTERN.match(str(commit_id_or_idx)):
258 raise CommitDoesNotExistError(
260 raise CommitDoesNotExistError(
259 "Given commit id %s not recognized" % commit_id_or_idx)
261 "Given commit id %s not recognized" % commit_id_or_idx)
260 return commit_id_or_idx
262 return commit_id_or_idx
261
263
262 def get_hook_location(self):
264 def get_hook_location(self):
263 """
265 """
264 returns absolute path to location where hooks are stored
266 returns absolute path to location where hooks are stored
265 """
267 """
266 loc = os.path.join(self.path, 'hooks')
268 loc = os.path.join(self.path, 'hooks')
267 if not self.bare:
269 if not self.bare:
268 loc = os.path.join(self.path, '.git', 'hooks')
270 loc = os.path.join(self.path, '.git', 'hooks')
269 return loc
271 return loc
270
272
271 @LazyProperty
273 @LazyProperty
272 def last_change(self):
274 def last_change(self):
273 """
275 """
274 Returns last change made on this repository as
276 Returns last change made on this repository as
275 `datetime.datetime` object.
277 `datetime.datetime` object.
276 """
278 """
277 try:
279 try:
278 return self.get_commit().date
280 return self.get_commit().date
279 except RepositoryError:
281 except RepositoryError:
280 tzoffset = makedate()[1]
282 tzoffset = makedate()[1]
281 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
283 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
282
284
283 def _get_fs_mtime(self):
285 def _get_fs_mtime(self):
284 idx_loc = '' if self.bare else '.git'
286 idx_loc = '' if self.bare else '.git'
285 # fallback to filesystem
287 # fallback to filesystem
286 in_path = os.path.join(self.path, idx_loc, "index")
288 in_path = os.path.join(self.path, idx_loc, "index")
287 he_path = os.path.join(self.path, idx_loc, "HEAD")
289 he_path = os.path.join(self.path, idx_loc, "HEAD")
288 if os.path.exists(in_path):
290 if os.path.exists(in_path):
289 return os.stat(in_path).st_mtime
291 return os.stat(in_path).st_mtime
290 else:
292 else:
291 return os.stat(he_path).st_mtime
293 return os.stat(he_path).st_mtime
292
294
293 @LazyProperty
295 @LazyProperty
294 def description(self):
296 def description(self):
295 description = self._remote.get_description()
297 description = self._remote.get_description()
296 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
298 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
297
299
298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
300 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
299 if self.is_empty():
301 if self.is_empty():
300 return OrderedDict()
302 return OrderedDict()
301
303
302 result = []
304 result = []
303 for ref, sha in self._refs.iteritems():
305 for ref, sha in self._refs.iteritems():
304 if ref.startswith(prefix):
306 if ref.startswith(prefix):
305 ref_name = ref
307 ref_name = ref
306 if strip_prefix:
308 if strip_prefix:
307 ref_name = ref[len(prefix):]
309 ref_name = ref[len(prefix):]
308 result.append((safe_unicode(ref_name), sha))
310 result.append((safe_unicode(ref_name), sha))
309
311
310 def get_name(entry):
312 def get_name(entry):
311 return entry[0]
313 return entry[0]
312
314
313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
315 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
314
316
315 def _get_branches(self):
317 def _get_branches(self):
316 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
318 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
317
319
318 @CachedProperty
320 @CachedProperty
319 def branches(self):
321 def branches(self):
320 return self._get_branches()
322 return self._get_branches()
321
323
322 @CachedProperty
324 @CachedProperty
323 def branches_closed(self):
325 def branches_closed(self):
324 return {}
326 return {}
325
327
326 @CachedProperty
328 @CachedProperty
327 def bookmarks(self):
329 def bookmarks(self):
328 return {}
330 return {}
329
331
330 @CachedProperty
332 @CachedProperty
331 def branches_all(self):
333 def branches_all(self):
332 all_branches = {}
334 all_branches = {}
333 all_branches.update(self.branches)
335 all_branches.update(self.branches)
334 all_branches.update(self.branches_closed)
336 all_branches.update(self.branches_closed)
335 return all_branches
337 return all_branches
336
338
337 @CachedProperty
339 @CachedProperty
338 def tags(self):
340 def tags(self):
339 return self._get_tags()
341 return self._get_tags()
340
342
341 def _get_tags(self):
343 def _get_tags(self):
342 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
344 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
343
345
344 def tag(self, name, user, commit_id=None, message=None, date=None,
346 def tag(self, name, user, commit_id=None, message=None, date=None,
345 **kwargs):
347 **kwargs):
346 # TODO: fix this method to apply annotated tags correct with message
348 # TODO: fix this method to apply annotated tags correct with message
347 """
349 """
348 Creates and returns a tag for the given ``commit_id``.
350 Creates and returns a tag for the given ``commit_id``.
349
351
350 :param name: name for new tag
352 :param name: name for new tag
351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
353 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
352 :param commit_id: commit id for which new tag would be created
354 :param commit_id: commit id for which new tag would be created
353 :param message: message of the tag's commit
355 :param message: message of the tag's commit
354 :param date: date of tag's commit
356 :param date: date of tag's commit
355
357
356 :raises TagAlreadyExistError: if tag with same name already exists
358 :raises TagAlreadyExistError: if tag with same name already exists
357 """
359 """
358 if name in self.tags:
360 if name in self.tags:
359 raise TagAlreadyExistError("Tag %s already exists" % name)
361 raise TagAlreadyExistError("Tag %s already exists" % name)
360 commit = self.get_commit(commit_id=commit_id)
362 commit = self.get_commit(commit_id=commit_id)
361 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
363 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
362
364
363 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
365 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
364
366
365 self._invalidate_prop_cache('tags')
367 self._invalidate_prop_cache('tags')
366 self._invalidate_prop_cache('_refs')
368 self._invalidate_prop_cache('_refs')
367
369
368 return commit
370 return commit
369
371
370 def remove_tag(self, name, user, message=None, date=None):
372 def remove_tag(self, name, user, message=None, date=None):
371 """
373 """
372 Removes tag with the given ``name``.
374 Removes tag with the given ``name``.
373
375
374 :param name: name of the tag to be removed
376 :param name: name of the tag to be removed
375 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
377 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
376 :param message: message of the tag's removal commit
378 :param message: message of the tag's removal commit
377 :param date: date of tag's removal commit
379 :param date: date of tag's removal commit
378
380
379 :raises TagDoesNotExistError: if tag with given name does not exists
381 :raises TagDoesNotExistError: if tag with given name does not exists
380 """
382 """
381 if name not in self.tags:
383 if name not in self.tags:
382 raise TagDoesNotExistError("Tag %s does not exist" % name)
384 raise TagDoesNotExistError("Tag %s does not exist" % name)
383
385
384 self._remote.tag_remove(name)
386 self._remote.tag_remove(name)
385 self._invalidate_prop_cache('tags')
387 self._invalidate_prop_cache('tags')
386 self._invalidate_prop_cache('_refs')
388 self._invalidate_prop_cache('_refs')
387
389
388 def _get_refs(self):
390 def _get_refs(self):
389 return self._remote.get_refs()
391 return self._remote.get_refs()
390
392
391 @CachedProperty
393 @CachedProperty
392 def _refs(self):
394 def _refs(self):
393 return self._get_refs()
395 return self._get_refs()
394
396
395 @property
397 @property
396 def _ref_tree(self):
398 def _ref_tree(self):
397 node = tree = {}
399 node = tree = {}
398 for ref, sha in self._refs.iteritems():
400 for ref, sha in self._refs.iteritems():
399 path = ref.split('/')
401 path = ref.split('/')
400 for bit in path[:-1]:
402 for bit in path[:-1]:
401 node = node.setdefault(bit, {})
403 node = node.setdefault(bit, {})
402 node[path[-1]] = sha
404 node[path[-1]] = sha
403 node = tree
405 node = tree
404 return tree
406 return tree
405
407
406 def get_remote_ref(self, ref_name):
408 def get_remote_ref(self, ref_name):
407 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
409 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
408 try:
410 try:
409 return self._refs[ref_key]
411 return self._refs[ref_key]
410 except Exception:
412 except Exception:
411 return
413 return
412
414
413 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
415 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
416 translate_tag=True, maybe_unreachable=False):
414 """
417 """
415 Returns `GitCommit` object representing commit from git repository
418 Returns `GitCommit` object representing commit from git repository
416 at the given `commit_id` or head (most recent commit) if None given.
419 at the given `commit_id` or head (most recent commit) if None given.
417 """
420 """
418 if self.is_empty():
421 if self.is_empty():
419 raise EmptyRepositoryError("There are no commits yet")
422 raise EmptyRepositoryError("There are no commits yet")
420
423
421 if commit_id is not None:
424 if commit_id is not None:
422 self._validate_commit_id(commit_id)
425 self._validate_commit_id(commit_id)
423 try:
426 try:
424 # we have cached idx, use it without contacting the remote
427 # we have cached idx, use it without contacting the remote
425 idx = self._commit_ids[commit_id]
428 idx = self._commit_ids[commit_id]
426 return GitCommit(self, commit_id, idx, pre_load=pre_load)
429 return GitCommit(self, commit_id, idx, pre_load=pre_load)
427 except KeyError:
430 except KeyError:
428 pass
431 pass
429
432
430 elif commit_idx is not None:
433 elif commit_idx is not None:
431 self._validate_commit_idx(commit_idx)
434 self._validate_commit_idx(commit_idx)
432 try:
435 try:
433 _commit_id = self.commit_ids[commit_idx]
436 _commit_id = self.commit_ids[commit_idx]
434 if commit_idx < 0:
437 if commit_idx < 0:
435 commit_idx = self.commit_ids.index(_commit_id)
438 commit_idx = self.commit_ids.index(_commit_id)
436 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
439 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
437 except IndexError:
440 except IndexError:
438 commit_id = commit_idx
441 commit_id = commit_idx
439 else:
442 else:
440 commit_id = "tip"
443 commit_id = "tip"
441
444
442 if translate_tag:
445 if translate_tag:
443 commit_id = self._lookup_commit(commit_id)
446 commit_id = self._lookup_commit(commit_id, maybe_unreachable=maybe_unreachable)
444
447
445 try:
448 try:
446 idx = self._commit_ids[commit_id]
449 idx = self._commit_ids[commit_id]
447 except KeyError:
450 except KeyError:
448 idx = -1
451 idx = -1
449
452
450 return GitCommit(self, commit_id, idx, pre_load=pre_load)
453 return GitCommit(self, commit_id, idx, pre_load=pre_load)
451
454
452 def get_commits(
455 def get_commits(
453 self, start_id=None, end_id=None, start_date=None, end_date=None,
456 self, start_id=None, end_id=None, start_date=None, end_date=None,
454 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
457 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
455 """
458 """
456 Returns generator of `GitCommit` objects from start to end (both
459 Returns generator of `GitCommit` objects from start to end (both
457 are inclusive), in ascending date order.
460 are inclusive), in ascending date order.
458
461
459 :param start_id: None, str(commit_id)
462 :param start_id: None, str(commit_id)
460 :param end_id: None, str(commit_id)
463 :param end_id: None, str(commit_id)
461 :param start_date: if specified, commits with commit date less than
464 :param start_date: if specified, commits with commit date less than
462 ``start_date`` would be filtered out from returned set
465 ``start_date`` would be filtered out from returned set
463 :param end_date: if specified, commits with commit date greater than
466 :param end_date: if specified, commits with commit date greater than
464 ``end_date`` would be filtered out from returned set
467 ``end_date`` would be filtered out from returned set
465 :param branch_name: if specified, commits not reachable from given
468 :param branch_name: if specified, commits not reachable from given
466 branch would be filtered out from returned set
469 branch would be filtered out from returned set
467 :param show_hidden: Show hidden commits such as obsolete or hidden from
470 :param show_hidden: Show hidden commits such as obsolete or hidden from
468 Mercurial evolve
471 Mercurial evolve
469 :raise BranchDoesNotExistError: If given `branch_name` does not
472 :raise BranchDoesNotExistError: If given `branch_name` does not
470 exist.
473 exist.
471 :raise CommitDoesNotExistError: If commits for given `start` or
474 :raise CommitDoesNotExistError: If commits for given `start` or
472 `end` could not be found.
475 `end` could not be found.
473
476
474 """
477 """
475 if self.is_empty():
478 if self.is_empty():
476 raise EmptyRepositoryError("There are no commits yet")
479 raise EmptyRepositoryError("There are no commits yet")
477
480
478 self._validate_branch_name(branch_name)
481 self._validate_branch_name(branch_name)
479
482
480 if start_id is not None:
483 if start_id is not None:
481 self._validate_commit_id(start_id)
484 self._validate_commit_id(start_id)
482 if end_id is not None:
485 if end_id is not None:
483 self._validate_commit_id(end_id)
486 self._validate_commit_id(end_id)
484
487
485 start_raw_id = self._lookup_commit(start_id)
488 start_raw_id = self._lookup_commit(start_id)
486 start_pos = self._commit_ids[start_raw_id] if start_id else None
489 start_pos = self._commit_ids[start_raw_id] if start_id else None
487 end_raw_id = self._lookup_commit(end_id)
490 end_raw_id = self._lookup_commit(end_id)
488 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
491 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
489
492
490 if None not in [start_id, end_id] and start_pos > end_pos:
493 if None not in [start_id, end_id] and start_pos > end_pos:
491 raise RepositoryError(
494 raise RepositoryError(
492 "Start commit '%s' cannot be after end commit '%s'" %
495 "Start commit '%s' cannot be after end commit '%s'" %
493 (start_id, end_id))
496 (start_id, end_id))
494
497
495 if end_pos is not None:
498 if end_pos is not None:
496 end_pos += 1
499 end_pos += 1
497
500
498 filter_ = []
501 filter_ = []
499 if branch_name:
502 if branch_name:
500 filter_.append({'branch_name': branch_name})
503 filter_.append({'branch_name': branch_name})
501 if start_date and not end_date:
504 if start_date and not end_date:
502 filter_.append({'since': start_date})
505 filter_.append({'since': start_date})
503 if end_date and not start_date:
506 if end_date and not start_date:
504 filter_.append({'until': end_date})
507 filter_.append({'until': end_date})
505 if start_date and end_date:
508 if start_date and end_date:
506 filter_.append({'since': start_date})
509 filter_.append({'since': start_date})
507 filter_.append({'until': end_date})
510 filter_.append({'until': end_date})
508
511
509 # if start_pos or end_pos:
512 # if start_pos or end_pos:
510 # filter_.append({'start': start_pos})
513 # filter_.append({'start': start_pos})
511 # filter_.append({'end': end_pos})
514 # filter_.append({'end': end_pos})
512
515
513 if filter_:
516 if filter_:
514 revfilters = {
517 revfilters = {
515 'branch_name': branch_name,
518 'branch_name': branch_name,
516 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
519 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
517 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
520 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
518 'start': start_pos,
521 'start': start_pos,
519 'end': end_pos,
522 'end': end_pos,
520 }
523 }
521 commit_ids = self._get_commit_ids(filters=revfilters)
524 commit_ids = self._get_commit_ids(filters=revfilters)
522
525
523 else:
526 else:
524 commit_ids = self.commit_ids
527 commit_ids = self.commit_ids
525
528
526 if start_pos or end_pos:
529 if start_pos or end_pos:
527 commit_ids = commit_ids[start_pos: end_pos]
530 commit_ids = commit_ids[start_pos: end_pos]
528
531
529 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
532 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
530 translate_tag=translate_tags)
533 translate_tag=translate_tags)
531
534
def get_diff(
        self, commit1, commit2, path='', ignore_whitespace=False,
        context=3, path1=None):
    """
    Returns (git like) *diff*, as plain text. Shows changes introduced by
    ``commit2`` since ``commit1``.

    :param commit1: Entry point from which diff is shown. Can be
        ``self.EMPTY_COMMIT`` - in this case, patch showing all
        the changes since empty state of the repository until ``commit2``
    :param commit2: Until which commits changes should be shown.
    :param ignore_whitespace: If set to ``True``, would not show whitespace
        changes. Defaults to ``False``.
    :param context: How many lines before/after changed lines should be
        shown. Defaults to ``3``.
    :param path1: Optional second path; diffing two *different* paths is
        not supported.
    :raise ValueError: when ``path1`` is given and differs from ``path``.
    """
    self._validate_diff_commits(commit1, commit2)
    if path1 is not None and path1 != path:
        raise ValueError("Diff of two different paths not supported.")

    # an empty path means "diff the whole repository"
    file_filter = path or None

    raw_diff = self._remote.diff(
        commit1.raw_id, commit2.raw_id, file_filter=file_filter,
        opt_ignorews=ignore_whitespace,
        context=context)
    return GitDiff(raw_diff)
562
565
def strip(self, commit_id, branch_name):
    """
    Strip ``commit_id`` from ``branch_name`` by moving the branch head
    to the commit's first parent.

    :param commit_id: commit to remove; must not be a merge commit.
    :param branch_name: branch whose head ref gets rewritten.
    :raise RepositoryError: if the commit is a merge commit, or is a root
        commit with no parent to reset to.
    :return: the new number of commits in the repository.
    """
    commit = self.get_commit(commit_id=commit_id)
    if commit.merge:
        # use the repo-level error type instead of a bare Exception, for
        # consistency with other failures raised by this backend
        raise RepositoryError('Cannot reset to merge commit')
    if not commit.parents:
        # a root commit has no parent to become the new head
        raise RepositoryError('Cannot strip commit without parents')

    # parent is going to be the new head now
    commit = commit.parents[0]
    self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)

    # clear cached properties so commit_ids/refs/branches get recomputed
    self._invalidate_prop_cache('commit_ids')
    self._invalidate_prop_cache('_refs')
    self._invalidate_prop_cache('branches')

    return len(self.commit_ids)
578
581
def get_common_ancestor(self, commit_id1, commit_id2, repo2):
    """
    Return the id of the common ancestor of ``commit_id1`` and
    ``commit_id2``; ``repo2`` may be this repository or another one.
    """
    if commit_id1 == commit_id2:
        return commit_id1

    if self == repo2:
        # same repository: git can answer directly
        output, __ = self.run_git_command(
            ['merge-base', commit_id1, commit_id2])
        return re.findall(r'[0-9a-fA-F]{40}', output)[0]

    # different repositories: derive the ancestor from the revs that
    # are missing from the other repo
    missing = self._remote.get_missing_revs(
        commit_id1, commit_id2, repo2.path)
    if not missing:
        # no commits from other repo, ancestor_id is the commit_id2
        return commit_id2

    oldest_missing = repo2.get_commit(missing[-1])
    if oldest_missing.parents:
        return oldest_missing.parents[0].raw_id
    return None
601
604
def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
    """
    Return the commits reachable from ``commit_id2`` but not from
    ``commit_id1``, oldest first. ``repo2`` may be a different repo.
    """
    if commit_id1 == commit_id2:
        return []

    if self != repo2:
        # cross-repo comparison: ask the remote which revs are missing
        missing_ids = self._remote.get_missing_revs(
            commit_id1, commit_id2, repo2.path)
        return [
            repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in reversed(missing_ids)]

    # same repo: a plain rev-range log is enough
    output, __ = self.run_git_command(
        ['log', '--reverse', '--pretty=format: %H', '-s',
         '%s..%s' % (commit_id1, commit_id2)])
    return [
        self.get_commit(commit_id=commit_id, pre_load=pre_load)
        for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
623
626
@LazyProperty
def in_memory_commit(self):
    """
    Lazily created ``GitInMemoryCommit`` bound to this repository.
    """
    return GitInMemoryCommit(self)
630
633
def pull(self, url, commit_ids=None, update_after=False):
    """
    Pull changes from external location. Pull is different in GIT
    from fetch, since it is doing a checkout.

    :param commit_ids: Optional. Can be set to a list of commit ids
        which shall be pulled from the other repository.
    """
    refs = None
    if commit_ids is not None:
        # translate the wanted commit ids into remote ref names
        remote_refs = self._remote.get_remote_refs(url)
        refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
    self._remote.pull(url, refs=refs, update_after=update_after)
    self._remote.invalidate_vcs_cache()
645
648
def fetch(self, url, commit_ids=None):
    """
    Fetch all git objects from external location.
    """
    self._remote.sync_fetch(url, refs=commit_ids)
    self._remote.invalidate_vcs_cache()
652
655
def push(self, url):
    """Push all refs of this repository to the given url."""
    self._remote.sync_push(url, refs=None)
656
659
def set_refs(self, ref_name, commit_id):
    """Point ``ref_name`` at ``commit_id`` and drop the cached refs."""
    self._remote.set_refs(ref_name, commit_id)
    self._invalidate_prop_cache('_refs')
660
663
def remove_ref(self, ref_name):
    """Delete ``ref_name`` and drop the cached refs."""
    self._remote.remove_ref(ref_name)
    self._invalidate_prop_cache('_refs')
664
667
def run_gc(self, prune=True):
    """
    Run ``git gc --aggressive`` on this repository, optionally pruning
    all loose objects, and return git's stderr output.
    """
    cmd = ['gc', '--aggressive']
    if prune:
        cmd.append('--prune=now')
    _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
    return stderr
674
665 def _update_server_info(self):
675 def _update_server_info(self):
666 """
676 """
667 runs gits update-server-info command in this repo instance
677 runs gits update-server-info command in this repo instance
668 """
678 """
669 self._remote.update_server_info()
679 self._remote.update_server_info()
670
680
671 def _current_branch(self):
681 def _current_branch(self):
672 """
682 """
673 Return the name of the current branch.
683 Return the name of the current branch.
674
684
675 It only works for non bare repositories (i.e. repositories with a
685 It only works for non bare repositories (i.e. repositories with a
676 working copy)
686 working copy)
677 """
687 """
678 if self.bare:
688 if self.bare:
679 raise RepositoryError('Bare git repos do not have active branches')
689 raise RepositoryError('Bare git repos do not have active branches')
680
690
681 if self.is_empty():
691 if self.is_empty():
682 return None
692 return None
683
693
684 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
694 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
685 return stdout.strip()
695 return stdout.strip()
686
696
687 def _checkout(self, branch_name, create=False, force=False):
697 def _checkout(self, branch_name, create=False, force=False):
688 """
698 """
689 Checkout a branch in the working directory.
699 Checkout a branch in the working directory.
690
700
691 It tries to create the branch if create is True, failing if the branch
701 It tries to create the branch if create is True, failing if the branch
692 already exists.
702 already exists.
693
703
694 It only works for non bare repositories (i.e. repositories with a
704 It only works for non bare repositories (i.e. repositories with a
695 working copy)
705 working copy)
696 """
706 """
697 if self.bare:
707 if self.bare:
698 raise RepositoryError('Cannot checkout branches in a bare git repo')
708 raise RepositoryError('Cannot checkout branches in a bare git repo')
699
709
700 cmd = ['checkout']
710 cmd = ['checkout']
701 if force:
711 if force:
702 cmd.append('-f')
712 cmd.append('-f')
703 if create:
713 if create:
704 cmd.append('-b')
714 cmd.append('-b')
705 cmd.append(branch_name)
715 cmd.append(branch_name)
706 self.run_git_command(cmd, fail_on_stderr=False)
716 self.run_git_command(cmd, fail_on_stderr=False)
707
717
708 def _create_branch(self, branch_name, commit_id):
718 def _create_branch(self, branch_name, commit_id):
709 """
719 """
710 creates a branch in a GIT repo
720 creates a branch in a GIT repo
711 """
721 """
712 self._remote.create_branch(branch_name, commit_id)
722 self._remote.create_branch(branch_name, commit_id)
713
723
714 def _identify(self):
724 def _identify(self):
715 """
725 """
716 Return the current state of the working directory.
726 Return the current state of the working directory.
717 """
727 """
718 if self.bare:
728 if self.bare:
719 raise RepositoryError('Bare git repos do not have active branches')
729 raise RepositoryError('Bare git repos do not have active branches')
720
730
721 if self.is_empty():
731 if self.is_empty():
722 return None
732 return None
723
733
724 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
734 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
725 return stdout.strip()
735 return stdout.strip()
726
736
def _local_clone(self, clone_path, branch_name, source_branch=None):
    """
    Create a local clone of the current repo at ``clone_path``.
    """
    clone_path = os.path.abspath(clone_path)

    # N.B.(skreft): the --branch option is required as otherwise the shallow
    # clone will only fetch the active branch.
    self.run_git_command(
        ['clone', '--branch', branch_name, self.path, clone_path],
        fail_on_stderr=False)

    # if we get the different source branch, make sure we also fetch it for
    # merge conditions
    if source_branch and source_branch != branch_name:
        # check if the ref exists before fetching it
        shadow_repo = GitRepository(clone_path)
        if shadow_repo.get_remote_ref(source_branch):
            self.run_git_command(
                ['fetch', self.path, source_branch], fail_on_stderr=False)
746
756
747 def _local_fetch(self, repository_path, branch_name, use_origin=False):
757 def _local_fetch(self, repository_path, branch_name, use_origin=False):
748 """
758 """
749 Fetch a branch from a local repository.
759 Fetch a branch from a local repository.
750 """
760 """
751 repository_path = os.path.abspath(repository_path)
761 repository_path = os.path.abspath(repository_path)
752 if repository_path == self.path:
762 if repository_path == self.path:
753 raise ValueError('Cannot fetch from the same repository')
763 raise ValueError('Cannot fetch from the same repository')
754
764
755 if use_origin:
765 if use_origin:
756 branch_name = '+{branch}:refs/heads/{branch}'.format(
766 branch_name = '+{branch}:refs/heads/{branch}'.format(
757 branch=branch_name)
767 branch=branch_name)
758
768
759 cmd = ['fetch', '--no-tags', '--update-head-ok',
769 cmd = ['fetch', '--no-tags', '--update-head-ok',
760 repository_path, branch_name]
770 repository_path, branch_name]
761 self.run_git_command(cmd, fail_on_stderr=False)
771 self.run_git_command(cmd, fail_on_stderr=False)
762
772
763 def _local_reset(self, branch_name):
773 def _local_reset(self, branch_name):
764 branch_name = '{}'.format(branch_name)
774 branch_name = '{}'.format(branch_name)
765 cmd = ['reset', '--hard', branch_name, '--']
775 cmd = ['reset', '--hard', branch_name, '--']
766 self.run_git_command(cmd, fail_on_stderr=False)
776 self.run_git_command(cmd, fail_on_stderr=False)
767
777
768 def _last_fetch_heads(self):
778 def _last_fetch_heads(self):
769 """
779 """
770 Return the last fetched heads that need merging.
780 Return the last fetched heads that need merging.
771
781
772 The algorithm is defined at
782 The algorithm is defined at
773 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
783 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
774 """
784 """
775 if not self.bare:
785 if not self.bare:
776 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
786 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
777 else:
787 else:
778 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
788 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
779
789
780 heads = []
790 heads = []
781 with open(fetch_heads_path) as f:
791 with open(fetch_heads_path) as f:
782 for line in f:
792 for line in f:
783 if ' not-for-merge ' in line:
793 if ' not-for-merge ' in line:
784 continue
794 continue
785 line = re.sub('\t.*', '', line, flags=re.DOTALL)
795 line = re.sub('\t.*', '', line, flags=re.DOTALL)
786 heads.append(line)
796 heads.append(line)
787
797
788 return heads
798 return heads
789
799
def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
    """Return a ``GitRepository`` opened on the given shadow repo path."""
    return GitRepository(shadow_repository_path, with_wire={"cache": cache})
792
802
793 def _local_pull(self, repository_path, branch_name, ff_only=True):
803 def _local_pull(self, repository_path, branch_name, ff_only=True):
794 """
804 """
795 Pull a branch from a local repository.
805 Pull a branch from a local repository.
796 """
806 """
797 if self.bare:
807 if self.bare:
798 raise RepositoryError('Cannot pull into a bare git repository')
808 raise RepositoryError('Cannot pull into a bare git repository')
799 # N.B.(skreft): The --ff-only option is to make sure this is a
809 # N.B.(skreft): The --ff-only option is to make sure this is a
800 # fast-forward (i.e., we are only pulling new changes and there are no
810 # fast-forward (i.e., we are only pulling new changes and there are no
801 # conflicts with our current branch)
811 # conflicts with our current branch)
802 # Additionally, that option needs to go before --no-tags, otherwise git
812 # Additionally, that option needs to go before --no-tags, otherwise git
803 # pull complains about it being an unknown flag.
813 # pull complains about it being an unknown flag.
804 cmd = ['pull']
814 cmd = ['pull']
805 if ff_only:
815 if ff_only:
806 cmd.append('--ff-only')
816 cmd.append('--ff-only')
807 cmd.extend(['--no-tags', repository_path, branch_name])
817 cmd.extend(['--no-tags', repository_path, branch_name])
808 self.run_git_command(cmd, fail_on_stderr=False)
818 self.run_git_command(cmd, fail_on_stderr=False)
809
819
810 def _local_merge(self, merge_message, user_name, user_email, heads):
820 def _local_merge(self, merge_message, user_name, user_email, heads):
811 """
821 """
812 Merge the given head into the checked out branch.
822 Merge the given head into the checked out branch.
813
823
814 It will force a merge commit.
824 It will force a merge commit.
815
825
816 Currently it raises an error if the repo is empty, as it is not possible
826 Currently it raises an error if the repo is empty, as it is not possible
817 to create a merge commit in an empty repo.
827 to create a merge commit in an empty repo.
818
828
819 :param merge_message: The message to use for the merge commit.
829 :param merge_message: The message to use for the merge commit.
820 :param heads: the heads to merge.
830 :param heads: the heads to merge.
821 """
831 """
822 if self.bare:
832 if self.bare:
823 raise RepositoryError('Cannot merge into a bare git repository')
833 raise RepositoryError('Cannot merge into a bare git repository')
824
834
825 if not heads:
835 if not heads:
826 return
836 return
827
837
828 if self.is_empty():
838 if self.is_empty():
829 # TODO(skreft): do something more robust in this case.
839 # TODO(skreft): do something more robust in this case.
830 raise RepositoryError(
840 raise RepositoryError('Do not know how to merge into empty repositories yet')
831 'Do not know how to merge into empty repositories yet')
832 unresolved = None
841 unresolved = None
833
842
834 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
843 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
835 # commit message. We also specify the user who is doing the merge.
844 # commit message. We also specify the user who is doing the merge.
836 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
845 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
837 '-c', 'user.email=%s' % safe_str(user_email),
846 '-c', 'user.email=%s' % safe_str(user_email),
838 'merge', '--no-ff', '-m', safe_str(merge_message)]
847 'merge', '--no-ff', '-m', safe_str(merge_message)]
839 cmd.extend(heads)
848
849 merge_cmd = cmd + heads
850
840 try:
851 try:
841 output = self.run_git_command(cmd, fail_on_stderr=False)
852 self.run_git_command(merge_cmd, fail_on_stderr=False)
842 except RepositoryError:
853 except RepositoryError:
843 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
854 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
844 fail_on_stderr=False)[0].splitlines()
855 fail_on_stderr=False)[0].splitlines()
845 # NOTE(marcink): we add U notation for consistent with HG backend output
856 # NOTE(marcink): we add U notation for consistent with HG backend output
846 unresolved = ['U {}'.format(f) for f in files]
857 unresolved = ['U {}'.format(f) for f in files]
847
858
848 # Cleanup any merge leftovers
859 # Cleanup any merge leftovers
860 self._remote.invalidate_vcs_cache()
849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
861 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
850
862
851 if unresolved:
863 if unresolved:
852 raise UnresolvedFilesInRepo(unresolved)
864 raise UnresolvedFilesInRepo(unresolved)
853 else:
865 else:
854 raise
866 raise
855
867
def _local_push(
        self, source_branch, repository_path, target_branch,
        enable_hooks=False, rc_scm_data=None):
    """
    Push the source_branch to the given repository and target_branch.

    Currently, if the target_branch is not master and the target repo is
    empty, the push will work, but then GitRepository won't be able to find
    the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
    pointing to master, which does not exist).

    It does not run the hooks in the target repo.
    """
    # TODO(skreft): deal with the case in which the target repo is empty,
    # and the target_branch is not master.
    target_repo = GitRepository(repository_path)
    pushing_to_checked_out = (
        not target_repo.bare and
        target_repo._current_branch() == target_branch)

    if pushing_to_checked_out:
        # Git prevents pushing to the checked out branch, so simulate it by
        # pulling into the target repository.
        target_repo._local_pull(self.path, source_branch)
        return

    cmd = ['push', os.path.abspath(repository_path),
           '%s:%s' % (source_branch, target_branch)]

    gitenv = {'RC_SCM_DATA': rc_scm_data} if rc_scm_data else {}
    if not enable_hooks:
        gitenv['RC_SKIP_HOOKS'] = '1'
    self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
887
899
888 def _get_new_pr_branch(self, source_branch, target_branch):
900 def _get_new_pr_branch(self, source_branch, target_branch):
889 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
901 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
890 pr_branches = []
902 pr_branches = []
891 for branch in self.branches:
903 for branch in self.branches:
892 if branch.startswith(prefix):
904 if branch.startswith(prefix):
893 pr_branches.append(int(branch[len(prefix):]))
905 pr_branches.append(int(branch[len(prefix):]))
894
906
895 if not pr_branches:
907 if not pr_branches:
896 branch_id = 0
908 branch_id = 0
897 else:
909 else:
898 branch_id = max(pr_branches) + 1
910 branch_id = max(pr_branches) + 1
899
911
900 return '%s%d' % (prefix, branch_id)
912 return '%s%d' % (prefix, branch_id)
901
913
902 def _maybe_prepare_merge_workspace(
914 def _maybe_prepare_merge_workspace(
903 self, repo_id, workspace_id, target_ref, source_ref):
915 self, repo_id, workspace_id, target_ref, source_ref):
904 shadow_repository_path = self._get_shadow_repository_path(
916 shadow_repository_path = self._get_shadow_repository_path(
905 self.path, repo_id, workspace_id)
917 self.path, repo_id, workspace_id)
906 if not os.path.exists(shadow_repository_path):
918 if not os.path.exists(shadow_repository_path):
907 self._local_clone(
919 self._local_clone(
908 shadow_repository_path, target_ref.name, source_ref.name)
920 shadow_repository_path, target_ref.name, source_ref.name)
909 log.debug('Prepared %s shadow repository in %s',
921 log.debug('Prepared %s shadow repository in %s',
910 self.alias, shadow_repository_path)
922 self.alias, shadow_repository_path)
911
923
912 return shadow_repository_path
924 return shadow_repository_path
913
925
914 def _merge_repo(self, repo_id, workspace_id, target_ref,
926 def _merge_repo(self, repo_id, workspace_id, target_ref,
915 source_repo, source_ref, merge_message,
927 source_repo, source_ref, merge_message,
916 merger_name, merger_email, dry_run=False,
928 merger_name, merger_email, dry_run=False,
917 use_rebase=False, close_branch=False):
929 use_rebase=False, close_branch=False):
918
930
919 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
931 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
920 'rebase' if use_rebase else 'merge', dry_run)
932 'rebase' if use_rebase else 'merge', dry_run)
921 if target_ref.commit_id != self.branches[target_ref.name]:
933 if target_ref.commit_id != self.branches[target_ref.name]:
922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
934 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
923 target_ref.commit_id, self.branches[target_ref.name])
935 target_ref.commit_id, self.branches[target_ref.name])
924 return MergeResponse(
936 return MergeResponse(
925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
937 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
926 metadata={'target_ref': target_ref})
938 metadata={'target_ref': target_ref})
927
939
928 shadow_repository_path = self._maybe_prepare_merge_workspace(
940 shadow_repository_path = self._maybe_prepare_merge_workspace(
929 repo_id, workspace_id, target_ref, source_ref)
941 repo_id, workspace_id, target_ref, source_ref)
930 shadow_repo = self.get_shadow_instance(shadow_repository_path)
942 shadow_repo = self.get_shadow_instance(shadow_repository_path)
931
943
932 # checkout source, if it's different. Otherwise we could not
944 # checkout source, if it's different. Otherwise we could not
933 # fetch proper commits for merge testing
945 # fetch proper commits for merge testing
934 if source_ref.name != target_ref.name:
946 if source_ref.name != target_ref.name:
935 if shadow_repo.get_remote_ref(source_ref.name):
947 if shadow_repo.get_remote_ref(source_ref.name):
936 shadow_repo._checkout(source_ref.name, force=True)
948 shadow_repo._checkout(source_ref.name, force=True)
937
949
938 # checkout target, and fetch changes
950 # checkout target, and fetch changes
939 shadow_repo._checkout(target_ref.name, force=True)
951 shadow_repo._checkout(target_ref.name, force=True)
940
952
941 # fetch/reset pull the target, in case it is changed
953 # fetch/reset pull the target, in case it is changed
942 # this handles even force changes
954 # this handles even force changes
943 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
955 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
944 shadow_repo._local_reset(target_ref.name)
956 shadow_repo._local_reset(target_ref.name)
945
957
946 # Need to reload repo to invalidate the cache, or otherwise we cannot
958 # Need to reload repo to invalidate the cache, or otherwise we cannot
947 # retrieve the last target commit.
959 # retrieve the last target commit.
948 shadow_repo = self.get_shadow_instance(shadow_repository_path)
960 shadow_repo = self.get_shadow_instance(shadow_repository_path)
949 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
961 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
950 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
962 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
951 target_ref, target_ref.commit_id,
963 target_ref, target_ref.commit_id,
952 shadow_repo.branches[target_ref.name])
964 shadow_repo.branches[target_ref.name])
953 return MergeResponse(
965 return MergeResponse(
954 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
966 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
955 metadata={'target_ref': target_ref})
967 metadata={'target_ref': target_ref})
956
968
957 # calculate new branch
969 # calculate new branch
958 pr_branch = shadow_repo._get_new_pr_branch(
970 pr_branch = shadow_repo._get_new_pr_branch(
959 source_ref.name, target_ref.name)
971 source_ref.name, target_ref.name)
960 log.debug('using pull-request merge branch: `%s`', pr_branch)
972 log.debug('using pull-request merge branch: `%s`', pr_branch)
961 # checkout to temp branch, and fetch changes
973 # checkout to temp branch, and fetch changes
962 shadow_repo._checkout(pr_branch, create=True)
974 shadow_repo._checkout(pr_branch, create=True)
963 try:
975 try:
964 shadow_repo._local_fetch(source_repo.path, source_ref.name)
976 shadow_repo._local_fetch(source_repo.path, source_ref.name)
965 except RepositoryError:
977 except RepositoryError:
966 log.exception('Failure when doing local fetch on '
978 log.exception('Failure when doing local fetch on '
967 'shadow repo: %s', shadow_repo)
979 'shadow repo: %s', shadow_repo)
968 return MergeResponse(
980 return MergeResponse(
969 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
981 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
970 metadata={'source_ref': source_ref})
982 metadata={'source_ref': source_ref})
971
983
972 merge_ref = None
984 merge_ref = None
973 merge_failure_reason = MergeFailureReason.NONE
985 merge_failure_reason = MergeFailureReason.NONE
974 metadata = {}
986 metadata = {}
975 try:
987 try:
976 shadow_repo._local_merge(merge_message, merger_name, merger_email,
988 shadow_repo._local_merge(merge_message, merger_name, merger_email,
977 [source_ref.commit_id])
989 [source_ref.commit_id])
978 merge_possible = True
990 merge_possible = True
979
991
980 # Need to invalidate the cache, or otherwise we
992 # Need to invalidate the cache, or otherwise we
981 # cannot retrieve the merge commit.
993 # cannot retrieve the merge commit.
982 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
994 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
983 merge_commit_id = shadow_repo.branches[pr_branch]
995 merge_commit_id = shadow_repo.branches[pr_branch]
984
996
985 # Set a reference pointing to the merge commit. This reference may
997 # Set a reference pointing to the merge commit. This reference may
986 # be used to easily identify the last successful merge commit in
998 # be used to easily identify the last successful merge commit in
987 # the shadow repository.
999 # the shadow repository.
988 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1000 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
989 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1001 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
990 except RepositoryError as e:
1002 except RepositoryError as e:
991 log.exception('Failure when doing local merge on git shadow repo')
1003 log.exception('Failure when doing local merge on git shadow repo')
992 if isinstance(e, UnresolvedFilesInRepo):
1004 if isinstance(e, UnresolvedFilesInRepo):
993 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1005 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
994
1006
995 merge_possible = False
1007 merge_possible = False
996 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1008 merge_failure_reason = MergeFailureReason.MERGE_FAILED
997
1009
998 if merge_possible and not dry_run:
1010 if merge_possible and not dry_run:
999 try:
1011 try:
1000 shadow_repo._local_push(
1012 shadow_repo._local_push(
1001 pr_branch, self.path, target_ref.name, enable_hooks=True,
1013 pr_branch, self.path, target_ref.name, enable_hooks=True,
1002 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1014 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1003 merge_succeeded = True
1015 merge_succeeded = True
1004 except RepositoryError:
1016 except RepositoryError:
1005 log.exception(
1017 log.exception(
1006 'Failure when doing local push from the shadow '
1018 'Failure when doing local push from the shadow '
1007 'repository to the target repository at %s.', self.path)
1019 'repository to the target repository at %s.', self.path)
1008 merge_succeeded = False
1020 merge_succeeded = False
1009 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1021 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1010 metadata['target'] = 'git shadow repo'
1022 metadata['target'] = 'git shadow repo'
1011 metadata['merge_commit'] = pr_branch
1023 metadata['merge_commit'] = pr_branch
1012 else:
1024 else:
1013 merge_succeeded = False
1025 merge_succeeded = False
1014
1026
1015 return MergeResponse(
1027 return MergeResponse(
1016 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1028 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1017 metadata=metadata)
1029 metadata=metadata)
@@ -1,978 +1,979 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference, BasePathPermissionChecker)
39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 from rhodecode.lib.vcs.compat import configparser
46 from rhodecode.lib.vcs.compat import configparser
47
47
# handy aliases used throughout this module
hexlify = binascii.hexlify
# mercurial's null revision: twenty zero bytes
nullid = "\0" * 20

log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be find at the given
63 Raises RepositoryError if repository could not be find at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being lazy
97 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = dict((commit_id, index)
105 self._commit_ids = dict((commit_id, index)
106 for index, commit_id in enumerate(commit_ids))
106 for index, commit_id in enumerate(commit_ids))
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only not closed active branches by default
126 Returns only not closed active branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exists
203 :raises TagDoesNotExistError: if tag with given name does not exists
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
233 (safe_unicode(n), hexlify(h)) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
250 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup="none")
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def hg_rebuild_fn_cache(self):
293 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
294 update_cache = self._remote.hg_rebuild_fn_cache()
295
295
296 self._remote.invalidate_vcs_cache()
296 self._remote.invalidate_vcs_cache()
297 return update_cache
297 return update_cache
298
298
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 if commit_id1 == commit_id2:
300 if commit_id1 == commit_id2:
301 return commit_id1
301 return commit_id1
302
302
303 ancestors = self._remote.revs_from_revspec(
303 ancestors = self._remote.revs_from_revspec(
304 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
304 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
305 other_path=repo2.path)
305 other_path=repo2.path)
306 return repo2[ancestors[0]].raw_id if ancestors else None
306 return repo2[ancestors[0]].raw_id if ancestors else None
307
307
308 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
308 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
309 if commit_id1 == commit_id2:
309 if commit_id1 == commit_id2:
310 commits = []
310 commits = []
311 else:
311 else:
312 if merge:
312 if merge:
313 indexes = self._remote.revs_from_revspec(
313 indexes = self._remote.revs_from_revspec(
314 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
314 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
315 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
315 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
316 else:
316 else:
317 indexes = self._remote.revs_from_revspec(
317 indexes = self._remote.revs_from_revspec(
318 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
318 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
319 commit_id1, other_path=repo2.path)
319 commit_id1, other_path=repo2.path)
320
320
321 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
321 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
322 for idx in indexes]
322 for idx in indexes]
323
323
324 return commits
324 return commits
325
325
326 @staticmethod
326 @staticmethod
327 def check_url(url, config):
327 def check_url(url, config):
328 """
328 """
329 Function will check given url and try to verify if it's a valid
329 Function will check given url and try to verify if it's a valid
330 link. Sometimes it may happened that mercurial will issue basic
330 link. Sometimes it may happened that mercurial will issue basic
331 auth request that can cause whole API to hang when used from python
331 auth request that can cause whole API to hang when used from python
332 or other external calls.
332 or other external calls.
333
333
334 On failures it'll raise urllib2.HTTPError, exception is also thrown
334 On failures it'll raise urllib2.HTTPError, exception is also thrown
335 when the return code is non 200
335 when the return code is non 200
336 """
336 """
337 # check first if it's not an local url
337 # check first if it's not an local url
338 if os.path.isdir(url) or url.startswith('file:'):
338 if os.path.isdir(url) or url.startswith('file:'):
339 return True
339 return True
340
340
341 # Request the _remote to verify the url
341 # Request the _remote to verify the url
342 return connection.Hg.check_url(url, config.serialize())
342 return connection.Hg.check_url(url, config.serialize())
343
343
344 @staticmethod
344 @staticmethod
345 def is_valid_repository(path):
345 def is_valid_repository(path):
346 return os.path.isdir(os.path.join(path, '.hg'))
346 return os.path.isdir(os.path.join(path, '.hg'))
347
347
348 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
348 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
349 """
349 """
350 Function will check for mercurial repository in given path. If there
350 Function will check for mercurial repository in given path. If there
351 is no repository in that path it will raise an exception unless
351 is no repository in that path it will raise an exception unless
352 `create` parameter is set to True - in that case repository would
352 `create` parameter is set to True - in that case repository would
353 be created.
353 be created.
354
354
355 If `src_url` is given, would try to clone repository from the
355 If `src_url` is given, would try to clone repository from the
356 location at given clone_point. Additionally it'll make update to
356 location at given clone_point. Additionally it'll make update to
357 working copy accordingly to `do_workspace_checkout` flag.
357 working copy accordingly to `do_workspace_checkout` flag.
358 """
358 """
359 if create and os.path.exists(self.path):
359 if create and os.path.exists(self.path):
360 raise RepositoryError(
360 raise RepositoryError(
361 "Cannot create repository at %s, location already exist"
361 "Cannot create repository at %s, location already exist"
362 % self.path)
362 % self.path)
363
363
364 if src_url:
364 if src_url:
365 url = str(self._get_url(src_url))
365 url = str(self._get_url(src_url))
366 MercurialRepository.check_url(url, self.config)
366 MercurialRepository.check_url(url, self.config)
367
367
368 self._remote.clone(url, self.path, do_workspace_checkout)
368 self._remote.clone(url, self.path, do_workspace_checkout)
369
369
370 # Don't try to create if we've already cloned repo
370 # Don't try to create if we've already cloned repo
371 create = False
371 create = False
372
372
373 if create:
373 if create:
374 os.makedirs(self.path, mode=0o755)
374 os.makedirs(self.path, mode=0o755)
375 self._remote.localrepository(create)
375 self._remote.localrepository(create)
376
376
377 @LazyProperty
377 @LazyProperty
378 def in_memory_commit(self):
378 def in_memory_commit(self):
379 return MercurialInMemoryCommit(self)
379 return MercurialInMemoryCommit(self)
380
380
381 @LazyProperty
381 @LazyProperty
382 def description(self):
382 def description(self):
383 description = self._remote.get_config_value(
383 description = self._remote.get_config_value(
384 'web', 'description', untrusted=True)
384 'web', 'description', untrusted=True)
385 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
385 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
386
386
387 @LazyProperty
387 @LazyProperty
388 def contact(self):
388 def contact(self):
389 contact = (
389 contact = (
390 self._remote.get_config_value("web", "contact") or
390 self._remote.get_config_value("web", "contact") or
391 self._remote.get_config_value("ui", "username"))
391 self._remote.get_config_value("ui", "username"))
392 return safe_unicode(contact or self.DEFAULT_CONTACT)
392 return safe_unicode(contact or self.DEFAULT_CONTACT)
393
393
394 @LazyProperty
394 @LazyProperty
395 def last_change(self):
395 def last_change(self):
396 """
396 """
397 Returns last change made on this repository as
397 Returns last change made on this repository as
398 `datetime.datetime` object.
398 `datetime.datetime` object.
399 """
399 """
400 try:
400 try:
401 return self.get_commit().date
401 return self.get_commit().date
402 except RepositoryError:
402 except RepositoryError:
403 tzoffset = makedate()[1]
403 tzoffset = makedate()[1]
404 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
404 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
405
405
406 def _get_fs_mtime(self):
406 def _get_fs_mtime(self):
407 # fallback to filesystem
407 # fallback to filesystem
408 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
408 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
409 st_path = os.path.join(self.path, '.hg', "store")
409 st_path = os.path.join(self.path, '.hg', "store")
410 if os.path.exists(cl_path):
410 if os.path.exists(cl_path):
411 return os.stat(cl_path).st_mtime
411 return os.stat(cl_path).st_mtime
412 else:
412 else:
413 return os.stat(st_path).st_mtime
413 return os.stat(st_path).st_mtime
414
414
415 def _get_url(self, url):
415 def _get_url(self, url):
416 """
416 """
417 Returns normalized url. If schema is not given, would fall
417 Returns normalized url. If schema is not given, would fall
418 to filesystem
418 to filesystem
419 (``file:///``) schema.
419 (``file:///``) schema.
420 """
420 """
421 url = url.encode('utf8')
421 url = url.encode('utf8')
422 if url != 'default' and '://' not in url:
422 if url != 'default' and '://' not in url:
423 url = "file:" + urllib.pathname2url(url)
423 url = "file:" + urllib.pathname2url(url)
424 return url
424 return url
425
425
426 def get_hook_location(self):
426 def get_hook_location(self):
427 """
427 """
428 returns absolute path to location where hooks are stored
428 returns absolute path to location where hooks are stored
429 """
429 """
430 return os.path.join(self.path, '.hg', '.hgrc')
430 return os.path.join(self.path, '.hg', '.hgrc')
431
431
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit hash or other Mercurial revision identifier;
            mutually exclusive with `commit_idx`.
        :param commit_idx: numeric index into ``commit_ids``; negative indices
            are normalized to their positive position.
        :param pre_load: optional list of commit attributes to pre-load.
        :param translate_tag: accepted for API compatibility; not used by this
            implementation.
        :param maybe_unreachable: accepted for API compatibility with other
            backends; not used by this implementation.
        :raises EmptyRepositoryError: if the repository has no commits.
        :raises CommitDoesNotExistError: if the commit cannot be resolved.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # not cached -- fall through to the remote lookup below
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize a negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # out-of-range index: let the remote lookup report the error
                commit_id = commit_idx
        else:
            # neither id nor index given -- default to the repository tip
            commit_id = "tip"

        # Python 2: the vcsserver lookup expects a byte string
        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            # re-raise with a message that includes the repository name
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
473
474
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :param translate_tags: accepted for API compatibility; not used by
          this implementation.
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
          exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # when True, filter by ancestors of the branch instead of the branch
        # itself; currently always False (kept for the revset build below)
        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end bound inclusive for the slice below
            end_pos += 1

        # build a Mercurial revset expression from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
556
557
557 def pull(self, url, commit_ids=None):
558 def pull(self, url, commit_ids=None):
558 """
559 """
559 Pull changes from external location.
560 Pull changes from external location.
560
561
561 :param commit_ids: Optional. Can be set to a list of commit ids
562 :param commit_ids: Optional. Can be set to a list of commit ids
562 which shall be pulled from the other repository.
563 which shall be pulled from the other repository.
563 """
564 """
564 url = self._get_url(url)
565 url = self._get_url(url)
565 self._remote.pull(url, commit_ids=commit_ids)
566 self._remote.pull(url, commit_ids=commit_ids)
566 self._remote.invalidate_vcs_cache()
567 self._remote.invalidate_vcs_cache()
567
568
568 def fetch(self, url, commit_ids=None):
569 def fetch(self, url, commit_ids=None):
569 """
570 """
570 Backward compatibility with GIT fetch==pull
571 Backward compatibility with GIT fetch==pull
571 """
572 """
572 return self.pull(url, commit_ids=commit_ids)
573 return self.pull(url, commit_ids=commit_ids)
573
574
574 def push(self, url):
575 def push(self, url):
575 url = self._get_url(url)
576 url = self._get_url(url)
576 self._remote.sync_push(url)
577 self._remote.sync_push(url)
577
578
578 def _local_clone(self, clone_path):
579 def _local_clone(self, clone_path):
579 """
580 """
580 Create a local clone of the current repo.
581 Create a local clone of the current repo.
581 """
582 """
582 self._remote.clone(self.path, clone_path, update_after_clone=True,
583 self._remote.clone(self.path, clone_path, update_after_clone=True,
583 hooks=False)
584 hooks=False)
584
585
585 def _update(self, revision, clean=False):
586 def _update(self, revision, clean=False):
586 """
587 """
587 Update the working copy to the specified revision.
588 Update the working copy to the specified revision.
588 """
589 """
589 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
590 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
590 self._remote.update(revision, clean=clean)
591 self._remote.update(revision, clean=clean)
591
592
592 def _identify(self):
593 def _identify(self):
593 """
594 """
594 Return the current state of the working directory.
595 Return the current state of the working directory.
595 """
596 """
596 return self._remote.identify().strip().rstrip('+')
597 return self._remote.identify().strip().rstrip('+')
597
598
598 def _heads(self, branch=None):
599 def _heads(self, branch=None):
599 """
600 """
600 Return the commit ids of the repository heads.
601 Return the commit ids of the repository heads.
601 """
602 """
602 return self._remote.heads(branch=branch).strip().split(' ')
603 return self._remote.heads(branch=branch).strip().split(' ')
603
604
604 def _ancestor(self, revision1, revision2):
605 def _ancestor(self, revision1, revision2):
605 """
606 """
606 Return the common ancestor of the two revisions.
607 Return the common ancestor of the two revisions.
607 """
608 """
608 return self._remote.ancestor(revision1, revision2)
609 return self._remote.ancestor(revision1, revision2)
609
610
610 def _local_push(
611 def _local_push(
611 self, revision, repository_path, push_branches=False,
612 self, revision, repository_path, push_branches=False,
612 enable_hooks=False):
613 enable_hooks=False):
613 """
614 """
614 Push the given revision to the specified repository.
615 Push the given revision to the specified repository.
615
616
616 :param push_branches: allow to create branches in the target repo.
617 :param push_branches: allow to create branches in the target repo.
617 """
618 """
618 self._remote.push(
619 self._remote.push(
619 [revision], repository_path, hooks=enable_hooks,
620 [revision], repository_path, hooks=enable_hooks,
620 push_branches=push_branches)
621 push_branches=push_branches)
621
622
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :raises UnresolvedFilesInRepo: when the merge/rebase fails due to
            conflicting files (after cleaning up the working copy).
        :raises RepositoryError: for any other merge/rebase failure.
        """
        # start from a clean checkout of the target
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # temporary bookmark so the rebased head can be checked out
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
694
695
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        :param close_message: optional commit message; a default
            "Closing branch" message is used when empty.
        :raises RepositoryError: when the close-commit fails (the working
            copy is cleaned up before re-raising).
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
716
717
717 def _is_the_same_branch(self, target_ref, source_ref):
718 def _is_the_same_branch(self, target_ref, source_ref):
718 return (
719 return (
719 self._get_branch_name(target_ref) ==
720 self._get_branch_name(target_ref) ==
720 self._get_branch_name(source_ref))
721 self._get_branch_name(source_ref))
721
722
722 def _get_branch_name(self, ref):
723 def _get_branch_name(self, ref):
723 if ref.type == 'branch':
724 if ref.type == 'branch':
724 return ref.name
725 return ref.name
725 return self._remote.ctx_branch(ref.commit_id)
726 return self._remote.ctx_branch(ref.commit_id)
726
727
727 def _maybe_prepare_merge_workspace(
728 def _maybe_prepare_merge_workspace(
728 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
729 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
729 shadow_repository_path = self._get_shadow_repository_path(
730 shadow_repository_path = self._get_shadow_repository_path(
730 self.path, repo_id, workspace_id)
731 self.path, repo_id, workspace_id)
731 if not os.path.exists(shadow_repository_path):
732 if not os.path.exists(shadow_repository_path):
732 self._local_clone(shadow_repository_path)
733 self._local_clone(shadow_repository_path)
733 log.debug(
734 log.debug(
734 'Prepared shadow repository in %s', shadow_repository_path)
735 'Prepared shadow repository in %s', shadow_repository_path)
735
736
736 return shadow_repository_path
737 return shadow_repository_path
737
738
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` from ``source_repo`` into ``target_ref`` of this
        repository inside a shadow repository and, unless ``dry_run``, push
        the result back. Returns a ``MergeResponse`` describing the outcome.

        :param dry_run: when True, perform the merge in the shadow repo only,
            without pushing to the target repository.
        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit.
        :param close_branch: also close the source branch (only honored for
            actual branch refs, and never when rebasing or dry-running).
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # the merge target must be a current head, otherwise it is outdated
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            # Mercurial branches may have multiple heads; refuse to merge
            # into an ambiguous target
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                # surface the conflicting files so callers can report them
                if isinstance(e, UnresolvedFilesInRepo):
                    metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
888
889
889 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
890 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
890 config = self.config.copy()
891 config = self.config.copy()
891 if not enable_hooks:
892 if not enable_hooks:
892 config.clear_section('hooks')
893 config.clear_section('hooks')
893 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
894 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
894
895
895 def _validate_pull_reference(self, reference):
896 def _validate_pull_reference(self, reference):
896 if not (reference.name in self.bookmarks or
897 if not (reference.name in self.bookmarks or
897 reference.name in self.branches or
898 reference.name in self.branches or
898 self.get_commit(reference.commit_id)):
899 self.get_commit(reference.commit_id)):
899 raise CommitDoesNotExistError(
900 raise CommitDoesNotExistError(
900 'Unknown branch, bookmark or commit id')
901 'Unknown branch, bookmark or commit id')
901
902
902 def _local_pull(self, repository_path, reference):
903 def _local_pull(self, repository_path, reference):
903 """
904 """
904 Fetch a branch, bookmark or commit from a local repository.
905 Fetch a branch, bookmark or commit from a local repository.
905 """
906 """
906 repository_path = os.path.abspath(repository_path)
907 repository_path = os.path.abspath(repository_path)
907 if repository_path == self.path:
908 if repository_path == self.path:
908 raise ValueError('Cannot pull from the same repository')
909 raise ValueError('Cannot pull from the same repository')
909
910
910 reference_type_to_option_name = {
911 reference_type_to_option_name = {
911 'book': 'bookmark',
912 'book': 'bookmark',
912 'branch': 'branch',
913 'branch': 'branch',
913 }
914 }
914 option_name = reference_type_to_option_name.get(
915 option_name = reference_type_to_option_name.get(
915 reference.type, 'revision')
916 reference.type, 'revision')
916
917
917 if option_name == 'revision':
918 if option_name == 'revision':
918 ref = reference.commit_id
919 ref = reference.commit_id
919 else:
920 else:
920 ref = reference.name
921 ref = reference.name
921
922
922 options = {option_name: [ref]}
923 options = {option_name: [ref]}
923 self._remote.pull_cmd(repository_path, hooks=False, **options)
924 self._remote.pull_cmd(repository_path, hooks=False, **options)
924 self._remote.invalidate_vcs_cache()
925 self._remote.invalidate_vcs_cache()
925
926
926 def bookmark(self, bookmark, revision=None):
927 def bookmark(self, bookmark, revision=None):
927 if isinstance(bookmark, unicode):
928 if isinstance(bookmark, unicode):
928 bookmark = safe_str(bookmark)
929 bookmark = safe_str(bookmark)
929 self._remote.bookmark(bookmark, revision=revision)
930 self._remote.bookmark(bookmark, revision=revision)
930 self._remote.invalidate_vcs_cache()
931 self._remote.invalidate_vcs_cache()
931
932
932 def get_path_permissions(self, username):
933 def get_path_permissions(self, username):
933 hgacl_file = os.path.join(self.path, '.hg/hgacl')
934 hgacl_file = os.path.join(self.path, '.hg/hgacl')
934
935
935 def read_patterns(suffix):
936 def read_patterns(suffix):
936 svalue = None
937 svalue = None
937 for section, option in [
938 for section, option in [
938 ('narrowacl', username + suffix),
939 ('narrowacl', username + suffix),
939 ('narrowacl', 'default' + suffix),
940 ('narrowacl', 'default' + suffix),
940 ('narrowhgacl', username + suffix),
941 ('narrowhgacl', username + suffix),
941 ('narrowhgacl', 'default' + suffix)
942 ('narrowhgacl', 'default' + suffix)
942 ]:
943 ]:
943 try:
944 try:
944 svalue = hgacl.get(section, option)
945 svalue = hgacl.get(section, option)
945 break # stop at the first value we find
946 break # stop at the first value we find
946 except configparser.NoOptionError:
947 except configparser.NoOptionError:
947 pass
948 pass
948 if not svalue:
949 if not svalue:
949 return None
950 return None
950 result = ['/']
951 result = ['/']
951 for pattern in svalue.split():
952 for pattern in svalue.split():
952 result.append(pattern)
953 result.append(pattern)
953 if '*' not in pattern and '?' not in pattern:
954 if '*' not in pattern and '?' not in pattern:
954 result.append(pattern + '/*')
955 result.append(pattern + '/*')
955 return result
956 return result
956
957
957 if os.path.exists(hgacl_file):
958 if os.path.exists(hgacl_file):
958 try:
959 try:
959 hgacl = configparser.RawConfigParser()
960 hgacl = configparser.RawConfigParser()
960 hgacl.read(hgacl_file)
961 hgacl.read(hgacl_file)
961
962
962 includes = read_patterns('.includes')
963 includes = read_patterns('.includes')
963 excludes = read_patterns('.excludes')
964 excludes = read_patterns('.excludes')
964 return BasePathPermissionChecker.create_from_patterns(
965 return BasePathPermissionChecker.create_from_patterns(
965 includes, excludes)
966 includes, excludes)
966 except BaseException as e:
967 except BaseException as e:
967 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
968 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
968 hgacl_file, self.name, e)
969 hgacl_file, self.name, e)
969 raise exceptions.RepositoryRequirementError(msg)
970 raise exceptions.RepositoryRequirementError(msg)
970 else:
971 else:
971 return None
972 return None
972
973
973
974
974 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
975 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
975
976
976 def _commit_factory(self, commit_id):
977 def _commit_factory(self, commit_id):
977 return self.repo.get_commit(
978 return self.repo.get_commit(
978 commit_idx=commit_id, pre_load=self.pre_load)
979 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,369 +1,370 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN repository module
22 SVN repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import date_astimestamp
32 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.utils import safe_str, safe_unicode
33 from rhodecode.lib.utils import safe_str, safe_unicode
34 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.backends.svn.commit import (
37 from rhodecode.lib.vcs.backends.svn.commit import (
38 SubversionCommit, _date_from_svn_properties)
38 SubversionCommit, _date_from_svn_properties)
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 VCSError, NodeDoesNotExistError)
44 VCSError, NodeDoesNotExistError)
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class SubversionRepository(base.BaseRepository):
50 class SubversionRepository(base.BaseRepository):
51 """
51 """
52 Subversion backend implementation
52 Subversion backend implementation
53
53
54 .. important::
54 .. important::
55
55
56 It is very important to distinguish the commit index and the commit id
56 It is very important to distinguish the commit index and the commit id
57 which is assigned by Subversion. The first one is always handled as an
57 which is assigned by Subversion. The first one is always handled as an
58 `int` by this implementation. The commit id assigned by Subversion on
58 `int` by this implementation. The commit id assigned by Subversion on
59 the other side will always be a `str`.
59 the other side will always be a `str`.
60
60
61 There is a specific trap since the first commit will have the index
61 There is a specific trap since the first commit will have the index
62 ``0`` but the svn id will be ``"1"``.
62 ``0`` but the svn id will be ``"1"``.
63
63
64 """
64 """
65
65
66 # Note: Subversion does not really have a default branch name.
66 # Note: Subversion does not really have a default branch name.
67 DEFAULT_BRANCH_NAME = None
67 DEFAULT_BRANCH_NAME = None
68
68
69 contact = base.BaseRepository.DEFAULT_CONTACT
69 contact = base.BaseRepository.DEFAULT_CONTACT
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
71
71
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
73 bare=False, **kwargs):
73 bare=False, **kwargs):
74 self.path = safe_str(os.path.abspath(repo_path))
74 self.path = safe_str(os.path.abspath(repo_path))
75 self.config = config if config else self.get_default_config()
75 self.config = config if config else self.get_default_config()
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
77
77
78 self._init_repo(create, src_url)
78 self._init_repo(create, src_url)
79
79
80 # caches
80 # caches
81 self._commit_ids = {}
81 self._commit_ids = {}
82
82
83 @LazyProperty
83 @LazyProperty
84 def _remote(self):
84 def _remote(self):
85 repo_id = self.path
85 repo_id = self.path
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
87
87
88 def _init_repo(self, create, src_url):
88 def _init_repo(self, create, src_url):
89 if create and os.path.exists(self.path):
89 if create and os.path.exists(self.path):
90 raise RepositoryError(
90 raise RepositoryError(
91 "Cannot create repository at %s, location already exist"
91 "Cannot create repository at %s, location already exist"
92 % self.path)
92 % self.path)
93
93
94 if create:
94 if create:
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
96 if src_url:
96 if src_url:
97 src_url = _sanitize_url(src_url)
97 src_url = _sanitize_url(src_url)
98 self._remote.import_remote_repository(src_url)
98 self._remote.import_remote_repository(src_url)
99 else:
99 else:
100 self._check_path()
100 self._check_path()
101
101
102 @CachedProperty
102 @CachedProperty
103 def commit_ids(self):
103 def commit_ids(self):
104 head = self._remote.lookup(None)
104 head = self._remote.lookup(None)
105 return [str(r) for r in xrange(1, head + 1)]
105 return [str(r) for r in xrange(1, head + 1)]
106
106
107 def _rebuild_cache(self, commit_ids):
107 def _rebuild_cache(self, commit_ids):
108 pass
108 pass
109
109
110 def run_svn_command(self, cmd, **opts):
110 def run_svn_command(self, cmd, **opts):
111 """
111 """
112 Runs given ``cmd`` as svn command and returns tuple
112 Runs given ``cmd`` as svn command and returns tuple
113 (stdout, stderr).
113 (stdout, stderr).
114
114
115 :param cmd: full svn command to be executed
115 :param cmd: full svn command to be executed
116 :param opts: env options to pass into Subprocess command
116 :param opts: env options to pass into Subprocess command
117 """
117 """
118 if not isinstance(cmd, list):
118 if not isinstance(cmd, list):
119 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
119 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
120
120
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
122 out, err = self._remote.run_svn_command(cmd, **opts)
122 out, err = self._remote.run_svn_command(cmd, **opts)
123 if err and not skip_stderr_log:
123 if err and not skip_stderr_log:
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
125 return out, err
125 return out, err
126
126
127 @LazyProperty
127 @LazyProperty
128 def branches(self):
128 def branches(self):
129 return self._tags_or_branches('vcs_svn_branch')
129 return self._tags_or_branches('vcs_svn_branch')
130
130
131 @LazyProperty
131 @LazyProperty
132 def branches_closed(self):
132 def branches_closed(self):
133 return {}
133 return {}
134
134
135 @LazyProperty
135 @LazyProperty
136 def bookmarks(self):
136 def bookmarks(self):
137 return {}
137 return {}
138
138
139 @LazyProperty
139 @LazyProperty
140 def branches_all(self):
140 def branches_all(self):
141 # TODO: johbo: Implement proper branch support
141 # TODO: johbo: Implement proper branch support
142 all_branches = {}
142 all_branches = {}
143 all_branches.update(self.branches)
143 all_branches.update(self.branches)
144 all_branches.update(self.branches_closed)
144 all_branches.update(self.branches_closed)
145 return all_branches
145 return all_branches
146
146
147 @LazyProperty
147 @LazyProperty
148 def tags(self):
148 def tags(self):
149 return self._tags_or_branches('vcs_svn_tag')
149 return self._tags_or_branches('vcs_svn_tag')
150
150
151 def _tags_or_branches(self, config_section):
151 def _tags_or_branches(self, config_section):
152 found_items = {}
152 found_items = {}
153
153
154 if self.is_empty():
154 if self.is_empty():
155 return {}
155 return {}
156
156
157 for pattern in self._patterns_from_section(config_section):
157 for pattern in self._patterns_from_section(config_section):
158 pattern = vcspath.sanitize(pattern)
158 pattern = vcspath.sanitize(pattern)
159 tip = self.get_commit()
159 tip = self.get_commit()
160 try:
160 try:
161 if pattern.endswith('*'):
161 if pattern.endswith('*'):
162 basedir = tip.get_node(vcspath.dirname(pattern))
162 basedir = tip.get_node(vcspath.dirname(pattern))
163 directories = basedir.dirs
163 directories = basedir.dirs
164 else:
164 else:
165 directories = (tip.get_node(pattern), )
165 directories = (tip.get_node(pattern), )
166 except NodeDoesNotExistError:
166 except NodeDoesNotExistError:
167 continue
167 continue
168 found_items.update(
168 found_items.update(
169 (safe_unicode(n.path),
169 (safe_unicode(n.path),
170 self.commit_ids[-1])
170 self.commit_ids[-1])
171 for n in directories)
171 for n in directories)
172
172
173 def get_name(item):
173 def get_name(item):
174 return item[0]
174 return item[0]
175
175
176 return OrderedDict(sorted(found_items.items(), key=get_name))
176 return OrderedDict(sorted(found_items.items(), key=get_name))
177
177
178 def _patterns_from_section(self, section):
178 def _patterns_from_section(self, section):
179 return (pattern for key, pattern in self.config.items(section))
179 return (pattern for key, pattern in self.config.items(section))
180
180
181 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
181 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
182 if self != repo2:
182 if self != repo2:
183 raise ValueError(
183 raise ValueError(
184 "Subversion does not support getting common ancestor of"
184 "Subversion does not support getting common ancestor of"
185 " different repositories.")
185 " different repositories.")
186
186
187 if int(commit_id1) < int(commit_id2):
187 if int(commit_id1) < int(commit_id2):
188 return commit_id1
188 return commit_id1
189 return commit_id2
189 return commit_id2
190
190
191 def verify(self):
191 def verify(self):
192 verify = self._remote.verify()
192 verify = self._remote.verify()
193
193
194 self._remote.invalidate_vcs_cache()
194 self._remote.invalidate_vcs_cache()
195 return verify
195 return verify
196
196
197 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
197 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
198 # TODO: johbo: Implement better comparison, this is a very naive
198 # TODO: johbo: Implement better comparison, this is a very naive
199 # version which does not allow to compare branches, tags or folders
199 # version which does not allow to compare branches, tags or folders
200 # at all.
200 # at all.
201 if repo2 != self:
201 if repo2 != self:
202 raise ValueError(
202 raise ValueError(
203 "Subversion does not support comparison of of different "
203 "Subversion does not support comparison of of different "
204 "repositories.")
204 "repositories.")
205
205
206 if commit_id1 == commit_id2:
206 if commit_id1 == commit_id2:
207 return []
207 return []
208
208
209 commit_idx1 = self._get_commit_idx(commit_id1)
209 commit_idx1 = self._get_commit_idx(commit_id1)
210 commit_idx2 = self._get_commit_idx(commit_id2)
210 commit_idx2 = self._get_commit_idx(commit_id2)
211
211
212 commits = [
212 commits = [
213 self.get_commit(commit_idx=idx)
213 self.get_commit(commit_idx=idx)
214 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
214 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
215
215
216 return commits
216 return commits
217
217
218 def _get_commit_idx(self, commit_id):
218 def _get_commit_idx(self, commit_id):
219 try:
219 try:
220 svn_rev = int(commit_id)
220 svn_rev = int(commit_id)
221 except:
221 except:
222 # TODO: johbo: this might be only one case, HEAD, check this
222 # TODO: johbo: this might be only one case, HEAD, check this
223 svn_rev = self._remote.lookup(commit_id)
223 svn_rev = self._remote.lookup(commit_id)
224 commit_idx = svn_rev - 1
224 commit_idx = svn_rev - 1
225 if commit_idx >= len(self.commit_ids):
225 if commit_idx >= len(self.commit_ids):
226 raise CommitDoesNotExistError(
226 raise CommitDoesNotExistError(
227 "Commit at index %s does not exist." % (commit_idx, ))
227 "Commit at index %s does not exist." % (commit_idx, ))
228 return commit_idx
228 return commit_idx
229
229
230 @staticmethod
230 @staticmethod
231 def check_url(url, config):
231 def check_url(url, config):
232 """
232 """
233 Check if `url` is a valid source to import a Subversion repository.
233 Check if `url` is a valid source to import a Subversion repository.
234 """
234 """
235 # convert to URL if it's a local directory
235 # convert to URL if it's a local directory
236 if os.path.isdir(url):
236 if os.path.isdir(url):
237 url = 'file://' + urllib.pathname2url(url)
237 url = 'file://' + urllib.pathname2url(url)
238 return connection.Svn.check_url(url, config.serialize())
238 return connection.Svn.check_url(url, config.serialize())
239
239
240 @staticmethod
240 @staticmethod
241 def is_valid_repository(path):
241 def is_valid_repository(path):
242 try:
242 try:
243 SubversionRepository(path)
243 SubversionRepository(path)
244 return True
244 return True
245 except VCSError:
245 except VCSError:
246 pass
246 pass
247 return False
247 return False
248
248
249 def _check_path(self):
249 def _check_path(self):
250 if not os.path.exists(self.path):
250 if not os.path.exists(self.path):
251 raise VCSError('Path "%s" does not exist!' % (self.path, ))
251 raise VCSError('Path "%s" does not exist!' % (self.path, ))
252 if not self._remote.is_path_valid_repository(self.path):
252 if not self._remote.is_path_valid_repository(self.path):
253 raise VCSError(
253 raise VCSError(
254 'Path "%s" does not contain a Subversion repository' %
254 'Path "%s" does not contain a Subversion repository' %
255 (self.path, ))
255 (self.path, ))
256
256
257 @LazyProperty
257 @LazyProperty
258 def last_change(self):
258 def last_change(self):
259 """
259 """
260 Returns last change made on this repository as
260 Returns last change made on this repository as
261 `datetime.datetime` object.
261 `datetime.datetime` object.
262 """
262 """
263 # Subversion always has a first commit which has id "0" and contains
263 # Subversion always has a first commit which has id "0" and contains
264 # what we are looking for.
264 # what we are looking for.
265 last_id = len(self.commit_ids)
265 last_id = len(self.commit_ids)
266 properties = self._remote.revision_properties(last_id)
266 properties = self._remote.revision_properties(last_id)
267 return _date_from_svn_properties(properties)
267 return _date_from_svn_properties(properties)
268
268
269 @LazyProperty
269 @LazyProperty
270 def in_memory_commit(self):
270 def in_memory_commit(self):
271 return SubversionInMemoryCommit(self)
271 return SubversionInMemoryCommit(self)
272
272
273 def get_hook_location(self):
273 def get_hook_location(self):
274 """
274 """
275 returns absolute path to location where hooks are stored
275 returns absolute path to location where hooks are stored
276 """
276 """
277 return os.path.join(self.path, 'hooks')
277 return os.path.join(self.path, 'hooks')
278
278
279 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
279 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
280 translate_tag=None, maybe_unreachable=False):
280 if self.is_empty():
281 if self.is_empty():
281 raise EmptyRepositoryError("There are no commits yet")
282 raise EmptyRepositoryError("There are no commits yet")
282 if commit_id is not None:
283 if commit_id is not None:
283 self._validate_commit_id(commit_id)
284 self._validate_commit_id(commit_id)
284 elif commit_idx is not None:
285 elif commit_idx is not None:
285 self._validate_commit_idx(commit_idx)
286 self._validate_commit_idx(commit_idx)
286 try:
287 try:
287 commit_id = self.commit_ids[commit_idx]
288 commit_id = self.commit_ids[commit_idx]
288 except IndexError:
289 except IndexError:
289 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
290 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
290
291
291 commit_id = self._sanitize_commit_id(commit_id)
292 commit_id = self._sanitize_commit_id(commit_id)
292 commit = SubversionCommit(repository=self, commit_id=commit_id)
293 commit = SubversionCommit(repository=self, commit_id=commit_id)
293 return commit
294 return commit
294
295
295 def get_commits(
296 def get_commits(
296 self, start_id=None, end_id=None, start_date=None, end_date=None,
297 self, start_id=None, end_id=None, start_date=None, end_date=None,
297 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
298 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
298 if self.is_empty():
299 if self.is_empty():
299 raise EmptyRepositoryError("There are no commit_ids yet")
300 raise EmptyRepositoryError("There are no commit_ids yet")
300 self._validate_branch_name(branch_name)
301 self._validate_branch_name(branch_name)
301
302
302 if start_id is not None:
303 if start_id is not None:
303 self._validate_commit_id(start_id)
304 self._validate_commit_id(start_id)
304 if end_id is not None:
305 if end_id is not None:
305 self._validate_commit_id(end_id)
306 self._validate_commit_id(end_id)
306
307
307 start_raw_id = self._sanitize_commit_id(start_id)
308 start_raw_id = self._sanitize_commit_id(start_id)
308 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
309 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
309 end_raw_id = self._sanitize_commit_id(end_id)
310 end_raw_id = self._sanitize_commit_id(end_id)
310 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
311 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
311
312
312 if None not in [start_id, end_id] and start_pos > end_pos:
313 if None not in [start_id, end_id] and start_pos > end_pos:
313 raise RepositoryError(
314 raise RepositoryError(
314 "Start commit '%s' cannot be after end commit '%s'" %
315 "Start commit '%s' cannot be after end commit '%s'" %
315 (start_id, end_id))
316 (start_id, end_id))
316 if end_pos is not None:
317 if end_pos is not None:
317 end_pos += 1
318 end_pos += 1
318
319
319 # Date based filtering
320 # Date based filtering
320 if start_date or end_date:
321 if start_date or end_date:
321 start_raw_id, end_raw_id = self._remote.lookup_interval(
322 start_raw_id, end_raw_id = self._remote.lookup_interval(
322 date_astimestamp(start_date) if start_date else None,
323 date_astimestamp(start_date) if start_date else None,
323 date_astimestamp(end_date) if end_date else None)
324 date_astimestamp(end_date) if end_date else None)
324 start_pos = start_raw_id - 1
325 start_pos = start_raw_id - 1
325 end_pos = end_raw_id
326 end_pos = end_raw_id
326
327
327 commit_ids = self.commit_ids
328 commit_ids = self.commit_ids
328
329
329 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
330 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
330 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
331 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
331 svn_rev = long(self.commit_ids[-1])
332 svn_rev = long(self.commit_ids[-1])
332 commit_ids = self._remote.node_history(
333 commit_ids = self._remote.node_history(
333 path=branch_name, revision=svn_rev, limit=None)
334 path=branch_name, revision=svn_rev, limit=None)
334 commit_ids = [str(i) for i in reversed(commit_ids)]
335 commit_ids = [str(i) for i in reversed(commit_ids)]
335
336
336 if start_pos or end_pos:
337 if start_pos or end_pos:
337 commit_ids = commit_ids[start_pos:end_pos]
338 commit_ids = commit_ids[start_pos:end_pos]
338 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
339 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
339
340
340 def _sanitize_commit_id(self, commit_id):
341 def _sanitize_commit_id(self, commit_id):
341 if commit_id and commit_id.isdigit():
342 if commit_id and commit_id.isdigit():
342 if int(commit_id) <= len(self.commit_ids):
343 if int(commit_id) <= len(self.commit_ids):
343 return commit_id
344 return commit_id
344 else:
345 else:
345 raise CommitDoesNotExistError(
346 raise CommitDoesNotExistError(
346 "Commit %s does not exist." % (commit_id, ))
347 "Commit %s does not exist." % (commit_id, ))
347 if commit_id not in [
348 if commit_id not in [
348 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
349 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
349 raise CommitDoesNotExistError(
350 raise CommitDoesNotExistError(
350 "Commit id %s not understood." % (commit_id, ))
351 "Commit id %s not understood." % (commit_id, ))
351 svn_rev = self._remote.lookup('HEAD')
352 svn_rev = self._remote.lookup('HEAD')
352 return str(svn_rev)
353 return str(svn_rev)
353
354
354 def get_diff(
355 def get_diff(
355 self, commit1, commit2, path=None, ignore_whitespace=False,
356 self, commit1, commit2, path=None, ignore_whitespace=False,
356 context=3, path1=None):
357 context=3, path1=None):
357 self._validate_diff_commits(commit1, commit2)
358 self._validate_diff_commits(commit1, commit2)
358 svn_rev1 = long(commit1.raw_id)
359 svn_rev1 = long(commit1.raw_id)
359 svn_rev2 = long(commit2.raw_id)
360 svn_rev2 = long(commit2.raw_id)
360 diff = self._remote.diff(
361 diff = self._remote.diff(
361 svn_rev1, svn_rev2, path1=path1, path2=path,
362 svn_rev1, svn_rev2, path1=path1, path2=path,
362 ignore_whitespace=ignore_whitespace, context=context)
363 ignore_whitespace=ignore_whitespace, context=context)
363 return SubversionDiff(diff)
364 return SubversionDiff(diff)
364
365
365
366
366 def _sanitize_url(url):
367 def _sanitize_url(url):
367 if '://' not in url:
368 if '://' not in url:
368 url = 'file://' + urllib.pathname2url(url)
369 url = 'file://' + urllib.pathname2url(url)
369 return url
370 return url
@@ -1,5511 +1,5517 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Database Models for RhodeCode Enterprise
22 Database Models for RhodeCode Enterprise
23 """
23 """
24
24
25 import re
25 import re
26 import os
26 import os
27 import time
27 import time
28 import string
28 import string
29 import hashlib
29 import hashlib
30 import logging
30 import logging
31 import datetime
31 import datetime
32 import uuid
32 import uuid
33 import warnings
33 import warnings
34 import ipaddress
34 import ipaddress
35 import functools
35 import functools
36 import traceback
36 import traceback
37 import collections
37 import collections
38
38
39 from sqlalchemy import (
39 from sqlalchemy import (
40 or_, and_, not_, func, cast, TypeDecorator, event,
40 or_, and_, not_, func, cast, TypeDecorator, event,
41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 Text, Float, PickleType, BigInteger)
43 Text, Float, PickleType, BigInteger)
44 from sqlalchemy.sql.expression import true, false, case
44 from sqlalchemy.sql.expression import true, false, case
45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 from sqlalchemy.orm import (
46 from sqlalchemy.orm import (
47 relationship, joinedload, class_mapper, validates, aliased)
47 relationship, joinedload, class_mapper, validates, aliased)
48 from sqlalchemy.ext.declarative import declared_attr
48 from sqlalchemy.ext.declarative import declared_attr
49 from sqlalchemy.ext.hybrid import hybrid_property
49 from sqlalchemy.ext.hybrid import hybrid_property
50 from sqlalchemy.exc import IntegrityError # pragma: no cover
50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 from sqlalchemy.dialects.mysql import LONGTEXT
51 from sqlalchemy.dialects.mysql import LONGTEXT
52 from zope.cachedescriptors.property import Lazy as LazyProperty
52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 from pyramid import compat
53 from pyramid import compat
54 from pyramid.threadlocal import get_current_request
54 from pyramid.threadlocal import get_current_request
55 from webhelpers2.text import remove_formatting
55 from webhelpers2.text import remove_formatting
56
56
57 from rhodecode.translation import _
57 from rhodecode.translation import _
58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
59 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
60 from rhodecode.lib.utils2 import (
60 from rhodecode.lib.utils2 import (
61 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
61 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
64 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
64 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
65 JsonRaw
65 JsonRaw
66 from rhodecode.lib.ext_json import json
66 from rhodecode.lib.ext_json import json
67 from rhodecode.lib.caching_query import FromCache
67 from rhodecode.lib.caching_query import FromCache
68 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
68 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
69 from rhodecode.lib.encrypt2 import Encryptor
69 from rhodecode.lib.encrypt2 import Encryptor
70 from rhodecode.lib.exceptions import (
70 from rhodecode.lib.exceptions import (
71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 from rhodecode.model.meta import Base, Session
72 from rhodecode.model.meta import Base, Session
73
73
74 URL_SEP = '/'
74 URL_SEP = '/'
75 log = logging.getLogger(__name__)
75 log = logging.getLogger(__name__)
76
76
77 # =============================================================================
77 # =============================================================================
78 # BASE CLASSES
78 # BASE CLASSES
79 # =============================================================================
79 # =============================================================================
80
80
81 # this is propagated from .ini file rhodecode.encrypted_values.secret or
81 # this is propagated from .ini file rhodecode.encrypted_values.secret or
82 # beaker.session.secret if first is not set.
82 # beaker.session.secret if first is not set.
83 # and initialized at environment.py
83 # and initialized at environment.py
84 ENCRYPTION_KEY = None
84 ENCRYPTION_KEY = None
85
85
86 # used to sort permissions by types, '#' used here is not allowed to be in
86 # used to sort permissions by types, '#' used here is not allowed to be in
87 # usernames, and it's very early in sorted string.printable table.
87 # usernames, and it's very early in sorted string.printable table.
88 PERMISSION_TYPE_SORT = {
88 PERMISSION_TYPE_SORT = {
89 'admin': '####',
89 'admin': '####',
90 'write': '###',
90 'write': '###',
91 'read': '##',
91 'read': '##',
92 'none': '#',
92 'none': '#',
93 }
93 }
94
94
95
95
96 def display_user_sort(obj):
96 def display_user_sort(obj):
97 """
97 """
98 Sort function used to sort permissions in .permissions() function of
98 Sort function used to sort permissions in .permissions() function of
99 Repository, RepoGroup, UserGroup. Also it put the default user in front
99 Repository, RepoGroup, UserGroup. Also it put the default user in front
100 of all other resources
100 of all other resources
101 """
101 """
102
102
103 if obj.username == User.DEFAULT_USER:
103 if obj.username == User.DEFAULT_USER:
104 return '#####'
104 return '#####'
105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
106 return prefix + obj.username
106 return prefix + obj.username
107
107
108
108
109 def display_user_group_sort(obj):
109 def display_user_group_sort(obj):
110 """
110 """
111 Sort function used to sort permissions in .permissions() function of
111 Sort function used to sort permissions in .permissions() function of
112 Repository, RepoGroup, UserGroup. Also it put the default user in front
112 Repository, RepoGroup, UserGroup. Also it put the default user in front
113 of all other resources
113 of all other resources
114 """
114 """
115
115
116 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
116 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
117 return prefix + obj.users_group_name
117 return prefix + obj.users_group_name
118
118
119
119
120 def _hash_key(k):
120 def _hash_key(k):
121 return sha1_safe(k)
121 return sha1_safe(k)
122
122
123
123
124 def in_filter_generator(qry, items, limit=500):
124 def in_filter_generator(qry, items, limit=500):
125 """
125 """
126 Splits IN() into multiple with OR
126 Splits IN() into multiple with OR
127 e.g.::
127 e.g.::
128 cnt = Repository.query().filter(
128 cnt = Repository.query().filter(
129 or_(
129 or_(
130 *in_filter_generator(Repository.repo_id, range(100000))
130 *in_filter_generator(Repository.repo_id, range(100000))
131 )).count()
131 )).count()
132 """
132 """
133 if not items:
133 if not items:
134 # empty list will cause empty query which might cause security issues
134 # empty list will cause empty query which might cause security issues
135 # this can lead to hidden unpleasant results
135 # this can lead to hidden unpleasant results
136 items = [-1]
136 items = [-1]
137
137
138 parts = []
138 parts = []
139 for chunk in xrange(0, len(items), limit):
139 for chunk in xrange(0, len(items), limit):
140 parts.append(
140 parts.append(
141 qry.in_(items[chunk: chunk + limit])
141 qry.in_(items[chunk: chunk + limit])
142 )
142 )
143
143
144 return parts
144 return parts
145
145
146
146
147 base_table_args = {
147 base_table_args = {
148 'extend_existing': True,
148 'extend_existing': True,
149 'mysql_engine': 'InnoDB',
149 'mysql_engine': 'InnoDB',
150 'mysql_charset': 'utf8',
150 'mysql_charset': 'utf8',
151 'sqlite_autoincrement': True
151 'sqlite_autoincrement': True
152 }
152 }
153
153
154
154
155 class EncryptedTextValue(TypeDecorator):
155 class EncryptedTextValue(TypeDecorator):
156 """
156 """
157 Special column for encrypted long text data, use like::
157 Special column for encrypted long text data, use like::
158
158
159 value = Column("encrypted_value", EncryptedValue(), nullable=False)
159 value = Column("encrypted_value", EncryptedValue(), nullable=False)
160
160
161 This column is intelligent so if value is in unencrypted form it return
161 This column is intelligent so if value is in unencrypted form it return
162 unencrypted form, but on save it always encrypts
162 unencrypted form, but on save it always encrypts
163 """
163 """
164 impl = Text
164 impl = Text
165
165
166 def process_bind_param(self, value, dialect):
166 def process_bind_param(self, value, dialect):
167 """
167 """
168 Setter for storing value
168 Setter for storing value
169 """
169 """
170 import rhodecode
170 import rhodecode
171 if not value:
171 if not value:
172 return value
172 return value
173
173
174 # protect against double encrypting if values is already encrypted
174 # protect against double encrypting if values is already encrypted
175 if value.startswith('enc$aes$') \
175 if value.startswith('enc$aes$') \
176 or value.startswith('enc$aes_hmac$') \
176 or value.startswith('enc$aes_hmac$') \
177 or value.startswith('enc2$'):
177 or value.startswith('enc2$'):
178 raise ValueError('value needs to be in unencrypted format, '
178 raise ValueError('value needs to be in unencrypted format, '
179 'ie. not starting with enc$ or enc2$')
179 'ie. not starting with enc$ or enc2$')
180
180
181 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
181 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
182 if algo == 'aes':
182 if algo == 'aes':
183 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
183 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
184 elif algo == 'fernet':
184 elif algo == 'fernet':
185 return Encryptor(ENCRYPTION_KEY).encrypt(value)
185 return Encryptor(ENCRYPTION_KEY).encrypt(value)
186 else:
186 else:
187 ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
187 ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
188
188
189 def process_result_value(self, value, dialect):
189 def process_result_value(self, value, dialect):
190 """
190 """
191 Getter for retrieving value
191 Getter for retrieving value
192 """
192 """
193
193
194 import rhodecode
194 import rhodecode
195 if not value:
195 if not value:
196 return value
196 return value
197
197
198 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
198 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
199 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
199 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
200 if algo == 'aes':
200 if algo == 'aes':
201 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
201 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
202 elif algo == 'fernet':
202 elif algo == 'fernet':
203 return Encryptor(ENCRYPTION_KEY).decrypt(value)
203 return Encryptor(ENCRYPTION_KEY).decrypt(value)
204 else:
204 else:
205 ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
205 ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
206 return decrypted_data
206 return decrypted_data
207
207
208
208
209 class BaseModel(object):
209 class BaseModel(object):
210 """
210 """
211 Base Model for all classes
211 Base Model for all classes
212 """
212 """
213
213
214 @classmethod
214 @classmethod
215 def _get_keys(cls):
215 def _get_keys(cls):
216 """return column names for this model """
216 """return column names for this model """
217 return class_mapper(cls).c.keys()
217 return class_mapper(cls).c.keys()
218
218
219 def get_dict(self):
219 def get_dict(self):
220 """
220 """
221 return dict with keys and values corresponding
221 return dict with keys and values corresponding
222 to this model data """
222 to this model data """
223
223
224 d = {}
224 d = {}
225 for k in self._get_keys():
225 for k in self._get_keys():
226 d[k] = getattr(self, k)
226 d[k] = getattr(self, k)
227
227
228 # also use __json__() if present to get additional fields
228 # also use __json__() if present to get additional fields
229 _json_attr = getattr(self, '__json__', None)
229 _json_attr = getattr(self, '__json__', None)
230 if _json_attr:
230 if _json_attr:
231 # update with attributes from __json__
231 # update with attributes from __json__
232 if callable(_json_attr):
232 if callable(_json_attr):
233 _json_attr = _json_attr()
233 _json_attr = _json_attr()
234 for k, val in _json_attr.iteritems():
234 for k, val in _json_attr.iteritems():
235 d[k] = val
235 d[k] = val
236 return d
236 return d
237
237
238 def get_appstruct(self):
238 def get_appstruct(self):
239 """return list with keys and values tuples corresponding
239 """return list with keys and values tuples corresponding
240 to this model data """
240 to this model data """
241
241
242 lst = []
242 lst = []
243 for k in self._get_keys():
243 for k in self._get_keys():
244 lst.append((k, getattr(self, k),))
244 lst.append((k, getattr(self, k),))
245 return lst
245 return lst
246
246
247 def populate_obj(self, populate_dict):
247 def populate_obj(self, populate_dict):
248 """populate model with data from given populate_dict"""
248 """populate model with data from given populate_dict"""
249
249
250 for k in self._get_keys():
250 for k in self._get_keys():
251 if k in populate_dict:
251 if k in populate_dict:
252 setattr(self, k, populate_dict[k])
252 setattr(self, k, populate_dict[k])
253
253
254 @classmethod
254 @classmethod
255 def query(cls):
255 def query(cls):
256 return Session().query(cls)
256 return Session().query(cls)
257
257
258 @classmethod
258 @classmethod
259 def get(cls, id_):
259 def get(cls, id_):
260 if id_:
260 if id_:
261 return cls.query().get(id_)
261 return cls.query().get(id_)
262
262
263 @classmethod
263 @classmethod
264 def get_or_404(cls, id_):
264 def get_or_404(cls, id_):
265 from pyramid.httpexceptions import HTTPNotFound
265 from pyramid.httpexceptions import HTTPNotFound
266
266
267 try:
267 try:
268 id_ = int(id_)
268 id_ = int(id_)
269 except (TypeError, ValueError):
269 except (TypeError, ValueError):
270 raise HTTPNotFound()
270 raise HTTPNotFound()
271
271
272 res = cls.query().get(id_)
272 res = cls.query().get(id_)
273 if not res:
273 if not res:
274 raise HTTPNotFound()
274 raise HTTPNotFound()
275 return res
275 return res
276
276
277 @classmethod
277 @classmethod
278 def getAll(cls):
278 def getAll(cls):
279 # deprecated and left for backward compatibility
279 # deprecated and left for backward compatibility
280 return cls.get_all()
280 return cls.get_all()
281
281
282 @classmethod
282 @classmethod
283 def get_all(cls):
283 def get_all(cls):
284 return cls.query().all()
284 return cls.query().all()
285
285
286 @classmethod
286 @classmethod
287 def delete(cls, id_):
287 def delete(cls, id_):
288 obj = cls.query().get(id_)
288 obj = cls.query().get(id_)
289 Session().delete(obj)
289 Session().delete(obj)
290
290
291 @classmethod
291 @classmethod
292 def identity_cache(cls, session, attr_name, value):
292 def identity_cache(cls, session, attr_name, value):
293 exist_in_session = []
293 exist_in_session = []
294 for (item_cls, pkey), instance in session.identity_map.items():
294 for (item_cls, pkey), instance in session.identity_map.items():
295 if cls == item_cls and getattr(instance, attr_name) == value:
295 if cls == item_cls and getattr(instance, attr_name) == value:
296 exist_in_session.append(instance)
296 exist_in_session.append(instance)
297 if exist_in_session:
297 if exist_in_session:
298 if len(exist_in_session) == 1:
298 if len(exist_in_session) == 1:
299 return exist_in_session[0]
299 return exist_in_session[0]
300 log.exception(
300 log.exception(
301 'multiple objects with attr %s and '
301 'multiple objects with attr %s and '
302 'value %s found with same name: %r',
302 'value %s found with same name: %r',
303 attr_name, value, exist_in_session)
303 attr_name, value, exist_in_session)
304
304
305 def __repr__(self):
305 def __repr__(self):
306 if hasattr(self, '__unicode__'):
306 if hasattr(self, '__unicode__'):
307 # python repr needs to return str
307 # python repr needs to return str
308 try:
308 try:
309 return safe_str(self.__unicode__())
309 return safe_str(self.__unicode__())
310 except UnicodeDecodeError:
310 except UnicodeDecodeError:
311 pass
311 pass
312 return '<DB:%s>' % (self.__class__.__name__)
312 return '<DB:%s>' % (self.__class__.__name__)
313
313
314
314
315 class RhodeCodeSetting(Base, BaseModel):
315 class RhodeCodeSetting(Base, BaseModel):
316 __tablename__ = 'rhodecode_settings'
316 __tablename__ = 'rhodecode_settings'
317 __table_args__ = (
317 __table_args__ = (
318 UniqueConstraint('app_settings_name'),
318 UniqueConstraint('app_settings_name'),
319 base_table_args
319 base_table_args
320 )
320 )
321
321
322 SETTINGS_TYPES = {
322 SETTINGS_TYPES = {
323 'str': safe_str,
323 'str': safe_str,
324 'int': safe_int,
324 'int': safe_int,
325 'unicode': safe_unicode,
325 'unicode': safe_unicode,
326 'bool': str2bool,
326 'bool': str2bool,
327 'list': functools.partial(aslist, sep=',')
327 'list': functools.partial(aslist, sep=',')
328 }
328 }
329 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
329 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
330 GLOBAL_CONF_KEY = 'app_settings'
330 GLOBAL_CONF_KEY = 'app_settings'
331
331
332 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
332 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
333 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
333 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
334 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
334 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
335 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
335 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
336
336
337 def __init__(self, key='', val='', type='unicode'):
337 def __init__(self, key='', val='', type='unicode'):
338 self.app_settings_name = key
338 self.app_settings_name = key
339 self.app_settings_type = type
339 self.app_settings_type = type
340 self.app_settings_value = val
340 self.app_settings_value = val
341
341
342 @validates('_app_settings_value')
342 @validates('_app_settings_value')
343 def validate_settings_value(self, key, val):
343 def validate_settings_value(self, key, val):
344 assert type(val) == unicode
344 assert type(val) == unicode
345 return val
345 return val
346
346
347 @hybrid_property
347 @hybrid_property
348 def app_settings_value(self):
348 def app_settings_value(self):
349 v = self._app_settings_value
349 v = self._app_settings_value
350 _type = self.app_settings_type
350 _type = self.app_settings_type
351 if _type:
351 if _type:
352 _type = self.app_settings_type.split('.')[0]
352 _type = self.app_settings_type.split('.')[0]
353 # decode the encrypted value
353 # decode the encrypted value
354 if 'encrypted' in self.app_settings_type:
354 if 'encrypted' in self.app_settings_type:
355 cipher = EncryptedTextValue()
355 cipher = EncryptedTextValue()
356 v = safe_unicode(cipher.process_result_value(v, None))
356 v = safe_unicode(cipher.process_result_value(v, None))
357
357
358 converter = self.SETTINGS_TYPES.get(_type) or \
358 converter = self.SETTINGS_TYPES.get(_type) or \
359 self.SETTINGS_TYPES['unicode']
359 self.SETTINGS_TYPES['unicode']
360 return converter(v)
360 return converter(v)
361
361
362 @app_settings_value.setter
362 @app_settings_value.setter
363 def app_settings_value(self, val):
363 def app_settings_value(self, val):
364 """
364 """
365 Setter that will always make sure we use unicode in app_settings_value
365 Setter that will always make sure we use unicode in app_settings_value
366
366
367 :param val:
367 :param val:
368 """
368 """
369 val = safe_unicode(val)
369 val = safe_unicode(val)
370 # encode the encrypted value
370 # encode the encrypted value
371 if 'encrypted' in self.app_settings_type:
371 if 'encrypted' in self.app_settings_type:
372 cipher = EncryptedTextValue()
372 cipher = EncryptedTextValue()
373 val = safe_unicode(cipher.process_bind_param(val, None))
373 val = safe_unicode(cipher.process_bind_param(val, None))
374 self._app_settings_value = val
374 self._app_settings_value = val
375
375
376 @hybrid_property
376 @hybrid_property
377 def app_settings_type(self):
377 def app_settings_type(self):
378 return self._app_settings_type
378 return self._app_settings_type
379
379
380 @app_settings_type.setter
380 @app_settings_type.setter
381 def app_settings_type(self, val):
381 def app_settings_type(self, val):
382 if val.split('.')[0] not in self.SETTINGS_TYPES:
382 if val.split('.')[0] not in self.SETTINGS_TYPES:
383 raise Exception('type must be one of %s got %s'
383 raise Exception('type must be one of %s got %s'
384 % (self.SETTINGS_TYPES.keys(), val))
384 % (self.SETTINGS_TYPES.keys(), val))
385 self._app_settings_type = val
385 self._app_settings_type = val
386
386
387 @classmethod
387 @classmethod
388 def get_by_prefix(cls, prefix):
388 def get_by_prefix(cls, prefix):
389 return RhodeCodeSetting.query()\
389 return RhodeCodeSetting.query()\
390 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
390 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
391 .all()
391 .all()
392
392
393 def __unicode__(self):
393 def __unicode__(self):
394 return u"<%s('%s:%s[%s]')>" % (
394 return u"<%s('%s:%s[%s]')>" % (
395 self.__class__.__name__,
395 self.__class__.__name__,
396 self.app_settings_name, self.app_settings_value,
396 self.app_settings_name, self.app_settings_value,
397 self.app_settings_type
397 self.app_settings_type
398 )
398 )
399
399
400
400
401 class RhodeCodeUi(Base, BaseModel):
401 class RhodeCodeUi(Base, BaseModel):
402 __tablename__ = 'rhodecode_ui'
402 __tablename__ = 'rhodecode_ui'
403 __table_args__ = (
403 __table_args__ = (
404 UniqueConstraint('ui_key'),
404 UniqueConstraint('ui_key'),
405 base_table_args
405 base_table_args
406 )
406 )
407
407
408 HOOK_REPO_SIZE = 'changegroup.repo_size'
408 HOOK_REPO_SIZE = 'changegroup.repo_size'
409 # HG
409 # HG
410 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
410 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
411 HOOK_PULL = 'outgoing.pull_logger'
411 HOOK_PULL = 'outgoing.pull_logger'
412 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
412 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
413 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
413 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
414 HOOK_PUSH = 'changegroup.push_logger'
414 HOOK_PUSH = 'changegroup.push_logger'
415 HOOK_PUSH_KEY = 'pushkey.key_push'
415 HOOK_PUSH_KEY = 'pushkey.key_push'
416
416
417 HOOKS_BUILTIN = [
417 HOOKS_BUILTIN = [
418 HOOK_PRE_PULL,
418 HOOK_PRE_PULL,
419 HOOK_PULL,
419 HOOK_PULL,
420 HOOK_PRE_PUSH,
420 HOOK_PRE_PUSH,
421 HOOK_PRETX_PUSH,
421 HOOK_PRETX_PUSH,
422 HOOK_PUSH,
422 HOOK_PUSH,
423 HOOK_PUSH_KEY,
423 HOOK_PUSH_KEY,
424 ]
424 ]
425
425
426 # TODO: johbo: Unify way how hooks are configured for git and hg,
426 # TODO: johbo: Unify way how hooks are configured for git and hg,
427 # git part is currently hardcoded.
427 # git part is currently hardcoded.
428
428
429 # SVN PATTERNS
429 # SVN PATTERNS
430 SVN_BRANCH_ID = 'vcs_svn_branch'
430 SVN_BRANCH_ID = 'vcs_svn_branch'
431 SVN_TAG_ID = 'vcs_svn_tag'
431 SVN_TAG_ID = 'vcs_svn_tag'
432
432
433 ui_id = Column(
433 ui_id = Column(
434 "ui_id", Integer(), nullable=False, unique=True, default=None,
434 "ui_id", Integer(), nullable=False, unique=True, default=None,
435 primary_key=True)
435 primary_key=True)
436 ui_section = Column(
436 ui_section = Column(
437 "ui_section", String(255), nullable=True, unique=None, default=None)
437 "ui_section", String(255), nullable=True, unique=None, default=None)
438 ui_key = Column(
438 ui_key = Column(
439 "ui_key", String(255), nullable=True, unique=None, default=None)
439 "ui_key", String(255), nullable=True, unique=None, default=None)
440 ui_value = Column(
440 ui_value = Column(
441 "ui_value", String(255), nullable=True, unique=None, default=None)
441 "ui_value", String(255), nullable=True, unique=None, default=None)
442 ui_active = Column(
442 ui_active = Column(
443 "ui_active", Boolean(), nullable=True, unique=None, default=True)
443 "ui_active", Boolean(), nullable=True, unique=None, default=True)
444
444
445 def __repr__(self):
445 def __repr__(self):
446 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
446 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
447 self.ui_key, self.ui_value)
447 self.ui_key, self.ui_value)
448
448
449
449
450 class RepoRhodeCodeSetting(Base, BaseModel):
450 class RepoRhodeCodeSetting(Base, BaseModel):
451 __tablename__ = 'repo_rhodecode_settings'
451 __tablename__ = 'repo_rhodecode_settings'
452 __table_args__ = (
452 __table_args__ = (
453 UniqueConstraint(
453 UniqueConstraint(
454 'app_settings_name', 'repository_id',
454 'app_settings_name', 'repository_id',
455 name='uq_repo_rhodecode_setting_name_repo_id'),
455 name='uq_repo_rhodecode_setting_name_repo_id'),
456 base_table_args
456 base_table_args
457 )
457 )
458
458
459 repository_id = Column(
459 repository_id = Column(
460 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
460 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
461 nullable=False)
461 nullable=False)
462 app_settings_id = Column(
462 app_settings_id = Column(
463 "app_settings_id", Integer(), nullable=False, unique=True,
463 "app_settings_id", Integer(), nullable=False, unique=True,
464 default=None, primary_key=True)
464 default=None, primary_key=True)
465 app_settings_name = Column(
465 app_settings_name = Column(
466 "app_settings_name", String(255), nullable=True, unique=None,
466 "app_settings_name", String(255), nullable=True, unique=None,
467 default=None)
467 default=None)
468 _app_settings_value = Column(
468 _app_settings_value = Column(
469 "app_settings_value", String(4096), nullable=True, unique=None,
469 "app_settings_value", String(4096), nullable=True, unique=None,
470 default=None)
470 default=None)
471 _app_settings_type = Column(
471 _app_settings_type = Column(
472 "app_settings_type", String(255), nullable=True, unique=None,
472 "app_settings_type", String(255), nullable=True, unique=None,
473 default=None)
473 default=None)
474
474
475 repository = relationship('Repository')
475 repository = relationship('Repository')
476
476
477 def __init__(self, repository_id, key='', val='', type='unicode'):
477 def __init__(self, repository_id, key='', val='', type='unicode'):
478 self.repository_id = repository_id
478 self.repository_id = repository_id
479 self.app_settings_name = key
479 self.app_settings_name = key
480 self.app_settings_type = type
480 self.app_settings_type = type
481 self.app_settings_value = val
481 self.app_settings_value = val
482
482
483 @validates('_app_settings_value')
483 @validates('_app_settings_value')
484 def validate_settings_value(self, key, val):
484 def validate_settings_value(self, key, val):
485 assert type(val) == unicode
485 assert type(val) == unicode
486 return val
486 return val
487
487
488 @hybrid_property
488 @hybrid_property
489 def app_settings_value(self):
489 def app_settings_value(self):
490 v = self._app_settings_value
490 v = self._app_settings_value
491 type_ = self.app_settings_type
491 type_ = self.app_settings_type
492 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
492 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
493 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
493 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
494 return converter(v)
494 return converter(v)
495
495
496 @app_settings_value.setter
496 @app_settings_value.setter
497 def app_settings_value(self, val):
497 def app_settings_value(self, val):
498 """
498 """
499 Setter that will always make sure we use unicode in app_settings_value
499 Setter that will always make sure we use unicode in app_settings_value
500
500
501 :param val:
501 :param val:
502 """
502 """
503 self._app_settings_value = safe_unicode(val)
503 self._app_settings_value = safe_unicode(val)
504
504
505 @hybrid_property
505 @hybrid_property
506 def app_settings_type(self):
506 def app_settings_type(self):
507 return self._app_settings_type
507 return self._app_settings_type
508
508
509 @app_settings_type.setter
509 @app_settings_type.setter
510 def app_settings_type(self, val):
510 def app_settings_type(self, val):
511 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
511 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
512 if val not in SETTINGS_TYPES:
512 if val not in SETTINGS_TYPES:
513 raise Exception('type must be one of %s got %s'
513 raise Exception('type must be one of %s got %s'
514 % (SETTINGS_TYPES.keys(), val))
514 % (SETTINGS_TYPES.keys(), val))
515 self._app_settings_type = val
515 self._app_settings_type = val
516
516
    def __unicode__(self):
        # debug representation: <Class('repo:key:value[type]')>
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
523
523
524
524
class RepoRhodeCodeUi(Base, BaseModel):
    """
    Per-repository override of a rhodecode ``ui`` configuration entry:
    a section/key/value triple, unique per repository.
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        # one value per (repo, section, key)
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    # allows disabling an entry without deleting the row
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        # debug representation: <RepoRhodeCodeUi[repo:section]key=>value]>
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
555
555
556
556
class User(Base, BaseModel):
    """
    A RhodeCode account: authentication data, profile fields and the
    relationships hanging off a user (repos, permissions, tokens, ...).
    """
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        base_table_args
    )

    # reserved anonymous-access account
    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    # note: ``name`` maps to the "firstname" column; see the firstname alias property
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    # exposed via the ``email`` hybrid property (lower-cased on write)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    # external-auth plugin bookkeeping
    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    # deprecated; see the api_key hybrid property
    _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # exposed via the ``user_data`` hybrid property (JSON-serialized dict)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')

    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    user_ssh_keys = relationship('UserSshKeys', cascade='all')

    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')
    # external identities
    external_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all')

    # artifacts owned
    artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')
633
633
    def __unicode__(self):
        # debug representation: <User('id:<user_id>:<username>')>
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.user_id, self.username)
637
637
    @hybrid_property
    def email(self):
        """Primary email address (stored lower-cased by the setter)."""
        return self._email
641
641
642 @email.setter
642 @email.setter
643 def email(self, val):
643 def email(self, val):
644 self._email = val.lower() if val else None
644 self._email = val.lower() if val else None
645
645
    @hybrid_property
    def first_name(self):
        """HTML-escaped first name; a falsy raw value is returned unchanged."""
        from rhodecode.lib import helpers as h
        if self.name:
            return h.escape(self.name)
        return self.name
652
652
653 @hybrid_property
653 @hybrid_property
654 def last_name(self):
654 def last_name(self):
655 from rhodecode.lib import helpers as h
655 from rhodecode.lib import helpers as h
656 if self.lastname:
656 if self.lastname:
657 return h.escape(self.lastname)
657 return h.escape(self.lastname)
658 return self.lastname
658 return self.lastname
659
659
    @hybrid_property
    def api_key(self):
        """
        Fetch if exist an auth-token with role ALL connected to this user
        """
        # expires == -1 marks a token that never expires
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        # None when no valid ROLE_ALL token exists
        return user_auth_token
674
674
    @api_key.setter
    def api_key(self, val):
        """Deprecated no-op setter; always clears the legacy column."""
        # don't allow to set API key this is deprecated for now
        self._api_key = None
679
679
    @property
    def reviewer_pull_requests(self):
        """All PullRequestReviewers rows for this user, PRs eagerly loaded."""
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()
686
686
    @property
    def firstname(self):
        """Alias of ``name`` (the "firstname" column)."""
        # alias for future
        return self.name
691
691
    @property
    def emails(self):
        """Primary email first, then alternate emails ordered by map id."""
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]
699
699
    def emails_cached(self):
        """Like :attr:`emails`, but served through the short SQL cache region."""
        emails = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc())

        emails = emails.options(
            FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
        )

        return [self.email] + [x.email for x in emails]
710
710
711 @property
711 @property
712 def auth_tokens(self):
712 def auth_tokens(self):
713 auth_tokens = self.get_auth_tokens()
713 auth_tokens = self.get_auth_tokens()
714 return [x.api_key for x in auth_tokens]
714 return [x.api_key for x in auth_tokens]
715
715
    def get_auth_tokens(self):
        """Return all UserApiKeys rows of this user, oldest first."""
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()
721
721
    @LazyProperty
    def feed_token(self):
        # computed once per instance, then cached by LazyProperty
        return self.get_feed_token()
725
725
    def get_feed_token(self, cache=True):
        """
        Return the first ROLE_FEED auth token of this user, or the
        'NO_FEED_TOKEN_AVAILABLE' sentinel string when none exists.

        :param cache: use the short SQL cache region for the query
        """
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'
738
738
    @LazyProperty
    def artifact_token(self):
        # computed once per instance, then cached by LazyProperty
        return self.get_artifact_token()
742
742
    def get_artifact_token(self, cache=True):
        """
        Return the first ROLE_ARTIFACT_DOWNLOAD auth token of this user,
        or the 'NO_ARTIFACT_TOKEN_AVAILABLE' sentinel when none exists.

        :param cache: use the short SQL cache region for the query
        """
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
        if cache:
            artifacts_tokens = artifacts_tokens.options(
                FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        return 'NO_ARTIFACT_TOKEN_AVAILABLE'
755
755
    @classmethod
    def get(cls, user_id, cache=False):
        """
        Fetch a User by primary key.

        :param user_id: primary key; falsy values short-circuit to None
        :param cache: use the short SQL cache region
        """
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", "get_users_%s" % user_id))
        return user.get(user_id)
766
766
    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        """
        Return all non-expired auth tokens of *user*; when *role* is given,
        tokens with that role or ROLE_ALL qualify.
        """
        # expires == -1 marks a token that never expires
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()
776
776
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check *auth_token* against this user's non-expired auth tokens.

        :param auth_token: plain token string supplied by the caller
        :param roles: optional list of acceptable UserApiKeys roles;
            ROLE_ALL is always accepted in addition
        :param scope_repo_id: repository id the call is scoped to; a
            repo-bound token only matches when its repo_id equals this
        :return: True when a valid, in-scope token matched, else False
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        # expires == -1 marks a token that never expires
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split stored tokens: hashed ones carry the backend's ENC prefix
        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                # unscoped token: valid everywhere
                return True

        return False
833
833
834 @property
834 @property
835 def ip_addresses(self):
835 def ip_addresses(self):
836 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
836 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
837 return [x.ip_addr for x in ret]
837 return [x.ip_addr for x in ret]
838
838
    @property
    def username_and_name(self):
        """Display string: ``username (First Last)``."""
        return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
842
842
843 @property
843 @property
844 def username_or_name_or_email(self):
844 def username_or_name_or_email(self):
845 full_name = self.full_name if self.full_name is not ' ' else None
845 full_name = self.full_name if self.full_name is not ' ' else None
846 return self.username or full_name or self.email
846 return self.username or full_name or self.email
847
847
    @property
    def full_name(self):
        """``First Last``; degenerates to a single space when both unset."""
        return '%s %s' % (self.first_name, self.last_name)
851
851
    @property
    def full_name_or_username(self):
        """``First Last`` when both names are set, otherwise the username."""
        return ('%s %s' % (self.first_name, self.last_name)
                if (self.first_name and self.last_name) else self.username)
856
856
    @property
    def full_contact(self):
        """RFC-822 style contact string: ``First Last <email>``."""
        return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
860
860
    @property
    def short_contact(self):
        """Contact string without email: ``First Last``."""
        return '%s %s' % (self.first_name, self.last_name)
864
864
    @property
    def is_admin(self):
        """True when this account is a super-admin."""
        return self.admin
868
868
    @property
    def language(self):
        """Language code stored in the user_data JSON blob, or None."""
        return self.user_data.get('language')
872
872
    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user

        :param kwargs: extra keyword arguments forwarded to AuthUser
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
879
879
    @hybrid_property
    def user_data(self):
        """
        Deserialized dict from the ``_user_data`` JSON column; {} when
        the column is unset or cannot be loaded.
        """
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        # NOTE(review): only TypeError is swallowed here; malformed JSON
        # (ValueError) would still propagate — confirm that is intended
        except TypeError:
            return {}
889
889
    @user_data.setter
    def user_data(self, val):
        """
        Serialize *val* (must be a dict) into the ``_user_data`` column.

        :raises Exception: when *val* is not a dict
        """
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            # best-effort: log serialization failure, keep previous value
            log.error(traceback.format_exc())
898
898
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Return a User by username, or None.

        :param username: name to look up
        :param case_insensitive: compare lower-cased on both sides
        :param cache: enable caching for the lookup
        :param identity_cache: with ``cache``, prefer the SQLAlchemy
            identity-map based cache over the SQL cache region
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
921
921
    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        """
        Return the owner of a non-expired auth token, or None.

        :param auth_token: token string matched exactly against api_key
        :param cache: use the short SQL cache region
        """
        # expires == -1 marks a token that never expires
        q = UserApiKeys.query()\
            .filter(UserApiKeys.api_key == auth_token)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))

        match = q.first()
        if match:
            return match.user
935
935
    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """
        Return a User by primary email, falling back to the alternate
        UserEmailMap table when the main column has no match.

        :param email: email address to look up
        :param case_insensitive: compare lower-cased on both sides
        :param cache: use the short SQL cache region for both queries
        """

        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email_key))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
            # map row -> owning user; None when no alternate match either
            ret = getattr(q.scalar(), 'user', None)

        return ret
965
965
    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        Matches first by the email embedded in *author*, then by the
        author-name part as username; returns None when nothing matches.

        :param author: raw commit author string (e.g. "Name <email>")
        """
        from rhodecode.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user
985
985
    def update_userdata(self, **kwargs):
        """Merge *kwargs* into the user_data JSON blob and stage the user."""
        usr = self
        # read-modify-write: the hybrid property (de)serializes the JSON
        old = usr.user_data
        old.update(**kwargs)
        usr.user_data = old
        Session().add(usr)
        log.debug('updated userdata with %s', kwargs)
993
993
994 def update_lastlogin(self):
994 def update_lastlogin(self):
995 """Update user lastlogin"""
995 """Update user lastlogin"""
996 self.last_login = datetime.datetime.now()
996 self.last_login = datetime.datetime.now()
997 Session().add(self)
997 Session().add(self)
998 log.debug('updated user %s lastlogin', self.username)
998 log.debug('updated user %s lastlogin', self.username)
999
999
1000 def update_password(self, new_password):
1000 def update_password(self, new_password):
1001 from rhodecode.lib.auth import get_crypt_password
1001 from rhodecode.lib.auth import get_crypt_password
1002
1002
1003 self.password = get_crypt_password(new_password)
1003 self.password = get_crypt_password(new_password)
1004 Session().add(self)
1004 Session().add(self)
1005
1005
1006 @classmethod
1006 @classmethod
1007 def get_first_super_admin(cls):
1007 def get_first_super_admin(cls):
1008 user = User.query()\
1008 user = User.query()\
1009 .filter(User.admin == true()) \
1009 .filter(User.admin == true()) \
1010 .order_by(User.user_id.asc()) \
1010 .order_by(User.user_id.asc()) \
1011 .first()
1011 .first()
1012
1012
1013 if user is None:
1013 if user is None:
1014 raise Exception('FATAL: Missing administrative account!')
1014 raise Exception('FATAL: Missing administrative account!')
1015 return user
1015 return user
1016
1016
1017 @classmethod
1017 @classmethod
1018 def get_all_super_admins(cls, only_active=False):
1018 def get_all_super_admins(cls, only_active=False):
1019 """
1019 """
1020 Returns all admin accounts sorted by username
1020 Returns all admin accounts sorted by username
1021 """
1021 """
1022 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1022 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1023 if only_active:
1023 if only_active:
1024 qry = qry.filter(User.active == true())
1024 qry = qry.filter(User.active == true())
1025 return qry.all()
1025 return qry.all()
1026
1026
1027 @classmethod
1027 @classmethod
1028 def get_all_user_ids(cls, only_active=True):
1028 def get_all_user_ids(cls, only_active=True):
1029 """
1029 """
1030 Returns all users IDs
1030 Returns all users IDs
1031 """
1031 """
1032 qry = Session().query(User.user_id)
1032 qry = Session().query(User.user_id)
1033
1033
1034 if only_active:
1034 if only_active:
1035 qry = qry.filter(User.active == true())
1035 qry = qry.filter(User.active == true())
1036 return [x.user_id for x in qry]
1036 return [x.user_id for x in qry]
1037
1037
1038 @classmethod
1038 @classmethod
1039 def get_default_user(cls, cache=False, refresh=False):
1039 def get_default_user(cls, cache=False, refresh=False):
1040 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1040 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1041 if user is None:
1041 if user is None:
1042 raise Exception('FATAL: Missing default account!')
1042 raise Exception('FATAL: Missing default account!')
1043 if refresh:
1043 if refresh:
1044 # The default user might be based on outdated state which
1044 # The default user might be based on outdated state which
1045 # has been loaded from the cache.
1045 # has been loaded from the cache.
1046 # A call to refresh() ensures that the
1046 # A call to refresh() ensures that the
1047 # latest state from the database is used.
1047 # latest state from the database is used.
1048 Session().refresh(user)
1048 Session().refresh(user)
1049 return user
1049 return user
1050
1050
1051 def _get_default_perms(self, user, suffix=''):
1051 def _get_default_perms(self, user, suffix=''):
1052 from rhodecode.model.permission import PermissionModel
1052 from rhodecode.model.permission import PermissionModel
1053 return PermissionModel().get_default_perms(user.user_perms, suffix)
1053 return PermissionModel().get_default_perms(user.user_perms, suffix)
1054
1054
1055 def get_default_perms(self, suffix=''):
1055 def get_default_perms(self, suffix=''):
1056 return self._get_default_perms(self, suffix)
1056 return self._get_default_perms(self, suffix)
1057
1057
1058 def get_api_data(self, include_secrets=False, details='full'):
1058 def get_api_data(self, include_secrets=False, details='full'):
1059 """
1059 """
1060 Common function for generating user related data for API
1060 Common function for generating user related data for API
1061
1061
1062 :param include_secrets: By default secrets in the API data will be replaced
1062 :param include_secrets: By default secrets in the API data will be replaced
1063 by a placeholder value to prevent exposing this data by accident. In case
1063 by a placeholder value to prevent exposing this data by accident. In case
1064 this data shall be exposed, set this flag to ``True``.
1064 this data shall be exposed, set this flag to ``True``.
1065
1065
1066 :param details: details can be 'basic|full' basic gives only a subset of
1066 :param details: details can be 'basic|full' basic gives only a subset of
1067 the available user information that includes user_id, name and emails.
1067 the available user information that includes user_id, name and emails.
1068 """
1068 """
1069 user = self
1069 user = self
1070 user_data = self.user_data
1070 user_data = self.user_data
1071 data = {
1071 data = {
1072 'user_id': user.user_id,
1072 'user_id': user.user_id,
1073 'username': user.username,
1073 'username': user.username,
1074 'firstname': user.name,
1074 'firstname': user.name,
1075 'lastname': user.lastname,
1075 'lastname': user.lastname,
1076 'description': user.description,
1076 'description': user.description,
1077 'email': user.email,
1077 'email': user.email,
1078 'emails': user.emails,
1078 'emails': user.emails,
1079 }
1079 }
1080 if details == 'basic':
1080 if details == 'basic':
1081 return data
1081 return data
1082
1082
1083 auth_token_length = 40
1083 auth_token_length = 40
1084 auth_token_replacement = '*' * auth_token_length
1084 auth_token_replacement = '*' * auth_token_length
1085
1085
1086 extras = {
1086 extras = {
1087 'auth_tokens': [auth_token_replacement],
1087 'auth_tokens': [auth_token_replacement],
1088 'active': user.active,
1088 'active': user.active,
1089 'admin': user.admin,
1089 'admin': user.admin,
1090 'extern_type': user.extern_type,
1090 'extern_type': user.extern_type,
1091 'extern_name': user.extern_name,
1091 'extern_name': user.extern_name,
1092 'last_login': user.last_login,
1092 'last_login': user.last_login,
1093 'last_activity': user.last_activity,
1093 'last_activity': user.last_activity,
1094 'ip_addresses': user.ip_addresses,
1094 'ip_addresses': user.ip_addresses,
1095 'language': user_data.get('language')
1095 'language': user_data.get('language')
1096 }
1096 }
1097 data.update(extras)
1097 data.update(extras)
1098
1098
1099 if include_secrets:
1099 if include_secrets:
1100 data['auth_tokens'] = user.auth_tokens
1100 data['auth_tokens'] = user.auth_tokens
1101 return data
1101 return data
1102
1102
1103 def __json__(self):
1103 def __json__(self):
1104 data = {
1104 data = {
1105 'full_name': self.full_name,
1105 'full_name': self.full_name,
1106 'full_name_or_username': self.full_name_or_username,
1106 'full_name_or_username': self.full_name_or_username,
1107 'short_contact': self.short_contact,
1107 'short_contact': self.short_contact,
1108 'full_contact': self.full_contact,
1108 'full_contact': self.full_contact,
1109 }
1109 }
1110 data.update(self.get_api_data())
1110 data.update(self.get_api_data())
1111 return data
1111 return data
1112
1112
1113
1113
class UserApiKeys(Base, BaseModel):
    """Auth tokens (API keys) with an optional role and repo / repo-group scope."""
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # token roles limiting what a given token may be used for
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
    ROLE_PASSWORD_RESET = 'token_password_reset'

    # NOTE(review): ROLE_PASSWORD_RESET is deliberately absent here — confirm
    # it is meant to be unavailable as a user-assignable role.
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # expiry as a unix timestamp; -1 means the token never expires
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns: a token may be restricted to one repo or one repo group
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        return {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }

    def get_api_data(self, include_secrets=False):
        """API payload; the token is obfuscated unless *include_secrets* is True."""
        data = self.__json__()
        if not include_secrets:
            data['auth_token'] = self.token_obfuscated
        return data

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        """True when the token's expiry timestamp lies in the past (-1 = never)."""
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        """Map a role constant to its human-readable label (falls back to *role*)."""
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        """Describe the token scope: repo, repo group, or global."""
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # keep only the first 4 characters visible
        if self.api_key:
            return self.api_key[:4] + "****"
1217
1217
class UserEmailMap(Base, BaseModel):
    """Extra email addresses attached to a user (besides the primary one)."""
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        """Reject an email that is already used as some user's primary email."""
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # fixed message typo: 'is present is' -> 'is present in'
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are stored lowercased for case-insensitive matching
        self._email = val.lower() if val else None
1247
1247
1248
1248
class UserIpMap(Base, BaseModel):
    """Per-user IP allow-list entries (single addresses or CIDR ranges)."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return [network_address, broadcast_address] for *ip_addr* (CIDR allowed)."""
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1283
1283
1284
1284
class UserSshKeys(Base, BaseModel):
    """SSH public keys registered for a user, identified by fingerprint."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        return {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }

    def get_api_data(self):
        """API payload for this key; the raw key data is not included."""
        return self.__json__()
1319
1319
1320
1320
class UserLog(Base, BaseModel):
    """Audit-log entry for a user action, optionally tied to a repository."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # audit entry schema versions
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        return self.user_log_id

    @property
    def action_as_day(self):
        """The calendar day (datetime.date) on which the action happened."""
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1369
1369
1370
1370
class UserGroup(Base, BaseModel):
    """A named group of users, with a JSON side-data blob and permission wiring."""
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    # NOTE: stripped a stray trailing space from the target class-name string
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")

    @classmethod
    def _load_group_data(cls, column):
        """Decode the JSON ``group_data`` column, returning {} when empty or corrupt."""
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except (TypeError, ValueError):
            # ValueError also covers corrupt JSON payloads, not just wrong types
            return {}

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # external sync source ('extern_type') when the group is externally managed
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        return self._load_sync(self.group_data)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """
        Fetch a user group by name.

        :param cache: use the short SQL cache
        :param case_insensitive: compare names case-insensitively
        """
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a user group by primary key; returns None for a falsy id."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)
1464
1464
1465 def permissions(self, with_admins=True, with_owner=True,
1465 def permissions(self, with_admins=True, with_owner=True,
1466 expand_from_user_groups=False):
1466 expand_from_user_groups=False):
1467 """
1467 """
1468 Permissions for user groups
1468 Permissions for user groups
1469 """
1469 """
1470 _admin_perm = 'usergroup.admin'
1470 _admin_perm = 'usergroup.admin'
1471
1471
1472 owner_row = []
1472 owner_row = []
1473 if with_owner:
1473 if with_owner:
1474 usr = AttributeDict(self.user.get_dict())
1474 usr = AttributeDict(self.user.get_dict())
1475 usr.owner_row = True
1475 usr.owner_row = True
1476 usr.permission = _admin_perm
1476 usr.permission = _admin_perm
1477 owner_row.append(usr)
1477 owner_row.append(usr)
1478
1478
1479 super_admin_ids = []
1479 super_admin_ids = []
1480 super_admin_rows = []
1480 super_admin_rows = []
1481 if with_admins:
1481 if with_admins:
1482 for usr in User.get_all_super_admins():
1482 for usr in User.get_all_super_admins():
1483 super_admin_ids.append(usr.user_id)
1483 super_admin_ids.append(usr.user_id)
1484 # if this admin is also owner, don't double the record
1484 # if this admin is also owner, don't double the record
1485 if usr.user_id == owner_row[0].user_id:
1485 if usr.user_id == owner_row[0].user_id:
1486 owner_row[0].admin_row = True
1486 owner_row[0].admin_row = True
1487 else:
1487 else:
1488 usr = AttributeDict(usr.get_dict())
1488 usr = AttributeDict(usr.get_dict())
1489 usr.admin_row = True
1489 usr.admin_row = True
1490 usr.permission = _admin_perm
1490 usr.permission = _admin_perm
1491 super_admin_rows.append(usr)
1491 super_admin_rows.append(usr)
1492
1492
1493 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1493 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1494 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1494 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1495 joinedload(UserUserGroupToPerm.user),
1495 joinedload(UserUserGroupToPerm.user),
1496 joinedload(UserUserGroupToPerm.permission),)
1496 joinedload(UserUserGroupToPerm.permission),)
1497
1497
1498 # get owners and admins and permissions. We do a trick of re-writing
1498 # get owners and admins and permissions. We do a trick of re-writing
1499 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1499 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1500 # has a global reference and changing one object propagates to all
1500 # has a global reference and changing one object propagates to all
1501 # others. This means if admin is also an owner admin_row that change
1501 # others. This means if admin is also an owner admin_row that change
1502 # would propagate to both objects
1502 # would propagate to both objects
1503 perm_rows = []
1503 perm_rows = []
1504 for _usr in q.all():
1504 for _usr in q.all():
1505 usr = AttributeDict(_usr.user.get_dict())
1505 usr = AttributeDict(_usr.user.get_dict())
1506 # if this user is also owner/admin, mark as duplicate record
1506 # if this user is also owner/admin, mark as duplicate record
1507 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1507 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1508 usr.duplicate_perm = True
1508 usr.duplicate_perm = True
1509 usr.permission = _usr.permission.permission_name
1509 usr.permission = _usr.permission.permission_name
1510 perm_rows.append(usr)
1510 perm_rows.append(usr)
1511
1511
1512 # filter the perm rows by 'default' first and then sort them by
1512 # filter the perm rows by 'default' first and then sort them by
1513 # admin,write,read,none permissions sorted again alphabetically in
1513 # admin,write,read,none permissions sorted again alphabetically in
1514 # each group
1514 # each group
1515 perm_rows = sorted(perm_rows, key=display_user_sort)
1515 perm_rows = sorted(perm_rows, key=display_user_sort)
1516
1516
1517 user_groups_rows = []
1517 user_groups_rows = []
1518 if expand_from_user_groups:
1518 if expand_from_user_groups:
1519 for ug in self.permission_user_groups(with_members=True):
1519 for ug in self.permission_user_groups(with_members=True):
1520 for user_data in ug.members:
1520 for user_data in ug.members:
1521 user_groups_rows.append(user_data)
1521 user_groups_rows.append(user_data)
1522
1522
1523 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1523 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1524
1524
1525 def permission_user_groups(self, with_members=False):
1525 def permission_user_groups(self, with_members=False):
1526 q = UserGroupUserGroupToPerm.query()\
1526 q = UserGroupUserGroupToPerm.query()\
1527 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1527 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1528 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1528 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1529 joinedload(UserGroupUserGroupToPerm.target_user_group),
1529 joinedload(UserGroupUserGroupToPerm.target_user_group),
1530 joinedload(UserGroupUserGroupToPerm.permission),)
1530 joinedload(UserGroupUserGroupToPerm.permission),)
1531
1531
1532 perm_rows = []
1532 perm_rows = []
1533 for _user_group in q.all():
1533 for _user_group in q.all():
1534 entry = AttributeDict(_user_group.user_group.get_dict())
1534 entry = AttributeDict(_user_group.user_group.get_dict())
1535 entry.permission = _user_group.permission.permission_name
1535 entry.permission = _user_group.permission.permission_name
1536 if with_members:
1536 if with_members:
1537 entry.members = [x.user.get_dict()
1537 entry.members = [x.user.get_dict()
1538 for x in _user_group.user_group.members]
1538 for x in _user_group.user_group.members]
1539 perm_rows.append(entry)
1539 perm_rows.append(entry)
1540
1540
1541 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1541 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1542 return perm_rows
1542 return perm_rows
1543
1543
1544 def _get_default_perms(self, user_group, suffix=''):
1544 def _get_default_perms(self, user_group, suffix=''):
1545 from rhodecode.model.permission import PermissionModel
1545 from rhodecode.model.permission import PermissionModel
1546 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1546 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1547
1547
1548 def get_default_perms(self, suffix=''):
1548 def get_default_perms(self, suffix=''):
1549 return self._get_default_perms(self, suffix)
1549 return self._get_default_perms(self, suffix)
1550
1550
1551 def get_api_data(self, with_group_members=True, include_secrets=False):
1551 def get_api_data(self, with_group_members=True, include_secrets=False):
1552 """
1552 """
1553 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1553 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1554 basically forwarded.
1554 basically forwarded.
1555
1555
1556 """
1556 """
1557 user_group = self
1557 user_group = self
1558 data = {
1558 data = {
1559 'users_group_id': user_group.users_group_id,
1559 'users_group_id': user_group.users_group_id,
1560 'group_name': user_group.users_group_name,
1560 'group_name': user_group.users_group_name,
1561 'group_description': user_group.user_group_description,
1561 'group_description': user_group.user_group_description,
1562 'active': user_group.users_group_active,
1562 'active': user_group.users_group_active,
1563 'owner': user_group.user.username,
1563 'owner': user_group.user.username,
1564 'sync': user_group.sync,
1564 'sync': user_group.sync,
1565 'owner_email': user_group.user.email,
1565 'owner_email': user_group.user.email,
1566 }
1566 }
1567
1567
1568 if with_group_members:
1568 if with_group_members:
1569 users = []
1569 users = []
1570 for user in user_group.members:
1570 for user in user_group.members:
1571 user = user.user
1571 user = user.user
1572 users.append(user.get_api_data(include_secrets=include_secrets))
1572 users.append(user.get_api_data(include_secrets=include_secrets))
1573 data['users'] = users
1573 data['users'] = users
1574
1574
1575 return data
1575 return data
1576
1576
1577
1577
class UserGroupMember(Base, BaseModel):
    """Association row linking a user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        self.users_group_id = gr_id
        self.user_id = u_id
1594
1594
1595
1595
class RepositoryField(Base, BaseModel):
    """Custom extra field attached to a repository (key/label/value/type)."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Form-safe key name, e.g. ``ex_my_field``."""
        return 'ex_%s' % self.field_key

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the ``ex_`` form prefix from *key* if present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Fetch the field row for (*repo*, *key*), or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1632
1632
1633
1633
class Repository(Base, BaseModel):
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # clone/push URI templates rendered for the UI
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # lifecycle states stored in ``repo_state``
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # reason markers stored inside the ``_locked`` column
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1743
1743
1744 def __unicode__(self):
1744 def __unicode__(self):
1745 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1745 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1746 safe_unicode(self.repo_name))
1746 safe_unicode(self.repo_name))
1747
1747
1748 @hybrid_property
1748 @hybrid_property
1749 def description_safe(self):
1749 def description_safe(self):
1750 from rhodecode.lib import helpers as h
1750 from rhodecode.lib import helpers as h
1751 return h.escape(self.description)
1751 return h.escape(self.description)
1752
1752
1753 @hybrid_property
1753 @hybrid_property
1754 def landing_rev(self):
1754 def landing_rev(self):
1755 # always should return [rev_type, rev]
1755 # always should return [rev_type, rev]
1756 if self._landing_revision:
1756 if self._landing_revision:
1757 _rev_info = self._landing_revision.split(':')
1757 _rev_info = self._landing_revision.split(':')
1758 if len(_rev_info) < 2:
1758 if len(_rev_info) < 2:
1759 _rev_info.insert(0, 'rev')
1759 _rev_info.insert(0, 'rev')
1760 return [_rev_info[0], _rev_info[1]]
1760 return [_rev_info[0], _rev_info[1]]
1761 return [None, None]
1761 return [None, None]
1762
1762
1763 @landing_rev.setter
1763 @landing_rev.setter
1764 def landing_rev(self, val):
1764 def landing_rev(self, val):
1765 if ':' not in val:
1765 if ':' not in val:
1766 raise ValueError('value must be delimited with `:` and consist '
1766 raise ValueError('value must be delimited with `:` and consist '
1767 'of <rev_type>:<rev>, got %s instead' % val)
1767 'of <rev_type>:<rev>, got %s instead' % val)
1768 self._landing_revision = val
1768 self._landing_revision = val
1769
1769
1770 @hybrid_property
1770 @hybrid_property
1771 def locked(self):
1771 def locked(self):
1772 if self._locked:
1772 if self._locked:
1773 user_id, timelocked, reason = self._locked.split(':')
1773 user_id, timelocked, reason = self._locked.split(':')
1774 lock_values = int(user_id), timelocked, reason
1774 lock_values = int(user_id), timelocked, reason
1775 else:
1775 else:
1776 lock_values = [None, None, None]
1776 lock_values = [None, None, None]
1777 return lock_values
1777 return lock_values
1778
1778
1779 @locked.setter
1779 @locked.setter
1780 def locked(self, val):
1780 def locked(self, val):
1781 if val and isinstance(val, (list, tuple)):
1781 if val and isinstance(val, (list, tuple)):
1782 self._locked = ':'.join(map(str, val))
1782 self._locked = ':'.join(map(str, val))
1783 else:
1783 else:
1784 self._locked = None
1784 self._locked = None
1785
1785
1786 @classmethod
1786 @classmethod
1787 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1787 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1788 from rhodecode.lib.vcs.backends.base import EmptyCommit
1788 from rhodecode.lib.vcs.backends.base import EmptyCommit
1789 dummy = EmptyCommit().__json__()
1789 dummy = EmptyCommit().__json__()
1790 if not changeset_cache_raw:
1790 if not changeset_cache_raw:
1791 dummy['source_repo_id'] = repo_id
1791 dummy['source_repo_id'] = repo_id
1792 return json.loads(json.dumps(dummy))
1792 return json.loads(json.dumps(dummy))
1793
1793
1794 try:
1794 try:
1795 return json.loads(changeset_cache_raw)
1795 return json.loads(changeset_cache_raw)
1796 except TypeError:
1796 except TypeError:
1797 return dummy
1797 return dummy
1798 except Exception:
1798 except Exception:
1799 log.error(traceback.format_exc())
1799 log.error(traceback.format_exc())
1800 return dummy
1800 return dummy
1801
1801
1802 @hybrid_property
1802 @hybrid_property
1803 def changeset_cache(self):
1803 def changeset_cache(self):
1804 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1804 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1805
1805
1806 @changeset_cache.setter
1806 @changeset_cache.setter
1807 def changeset_cache(self, val):
1807 def changeset_cache(self, val):
1808 try:
1808 try:
1809 self._changeset_cache = json.dumps(val)
1809 self._changeset_cache = json.dumps(val)
1810 except Exception:
1810 except Exception:
1811 log.error(traceback.format_exc())
1811 log.error(traceback.format_exc())
1812
1812
1813 @hybrid_property
1813 @hybrid_property
1814 def repo_name(self):
1814 def repo_name(self):
1815 return self._repo_name
1815 return self._repo_name
1816
1816
1817 @repo_name.setter
1817 @repo_name.setter
1818 def repo_name(self, value):
1818 def repo_name(self, value):
1819 self._repo_name = value
1819 self._repo_name = value
1820 self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1820 self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1821
1821
1822 @classmethod
1822 @classmethod
1823 def normalize_repo_name(cls, repo_name):
1823 def normalize_repo_name(cls, repo_name):
1824 """
1824 """
1825 Normalizes os specific repo_name to the format internally stored inside
1825 Normalizes os specific repo_name to the format internally stored inside
1826 database using URL_SEP
1826 database using URL_SEP
1827
1827
1828 :param cls:
1828 :param cls:
1829 :param repo_name:
1829 :param repo_name:
1830 """
1830 """
1831 return cls.NAME_SEP.join(repo_name.split(os.sep))
1831 return cls.NAME_SEP.join(repo_name.split(os.sep))
1832
1832
1833 @classmethod
1833 @classmethod
1834 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1834 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1835 session = Session()
1835 session = Session()
1836 q = session.query(cls).filter(cls.repo_name == repo_name)
1836 q = session.query(cls).filter(cls.repo_name == repo_name)
1837
1837
1838 if cache:
1838 if cache:
1839 if identity_cache:
1839 if identity_cache:
1840 val = cls.identity_cache(session, 'repo_name', repo_name)
1840 val = cls.identity_cache(session, 'repo_name', repo_name)
1841 if val:
1841 if val:
1842 return val
1842 return val
1843 else:
1843 else:
1844 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1844 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1845 q = q.options(
1845 q = q.options(
1846 FromCache("sql_cache_short", cache_key))
1846 FromCache("sql_cache_short", cache_key))
1847
1847
1848 return q.scalar()
1848 return q.scalar()
1849
1849
1850 @classmethod
1850 @classmethod
1851 def get_by_id_or_repo_name(cls, repoid):
1851 def get_by_id_or_repo_name(cls, repoid):
1852 if isinstance(repoid, (int, long)):
1852 if isinstance(repoid, (int, long)):
1853 try:
1853 try:
1854 repo = cls.get(repoid)
1854 repo = cls.get(repoid)
1855 except ValueError:
1855 except ValueError:
1856 repo = None
1856 repo = None
1857 else:
1857 else:
1858 repo = cls.get_by_repo_name(repoid)
1858 repo = cls.get_by_repo_name(repoid)
1859 return repo
1859 return repo
1860
1860
1861 @classmethod
1861 @classmethod
1862 def get_by_full_path(cls, repo_full_path):
1862 def get_by_full_path(cls, repo_full_path):
1863 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1863 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1864 repo_name = cls.normalize_repo_name(repo_name)
1864 repo_name = cls.normalize_repo_name(repo_name)
1865 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1865 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1866
1866
1867 @classmethod
1867 @classmethod
1868 def get_repo_forks(cls, repo_id):
1868 def get_repo_forks(cls, repo_id):
1869 return cls.query().filter(Repository.fork_id == repo_id)
1869 return cls.query().filter(Repository.fork_id == repo_id)
1870
1870
1871 @classmethod
1871 @classmethod
1872 def base_path(cls):
1872 def base_path(cls):
1873 """
1873 """
1874 Returns base path when all repos are stored
1874 Returns base path when all repos are stored
1875
1875
1876 :param cls:
1876 :param cls:
1877 """
1877 """
1878 q = Session().query(RhodeCodeUi)\
1878 q = Session().query(RhodeCodeUi)\
1879 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1879 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1880 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1880 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1881 return q.one().ui_value
1881 return q.one().ui_value
1882
1882
1883 @classmethod
1883 @classmethod
1884 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1884 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1885 case_insensitive=True, archived=False):
1885 case_insensitive=True, archived=False):
1886 q = Repository.query()
1886 q = Repository.query()
1887
1887
1888 if not archived:
1888 if not archived:
1889 q = q.filter(Repository.archived.isnot(true()))
1889 q = q.filter(Repository.archived.isnot(true()))
1890
1890
1891 if not isinstance(user_id, Optional):
1891 if not isinstance(user_id, Optional):
1892 q = q.filter(Repository.user_id == user_id)
1892 q = q.filter(Repository.user_id == user_id)
1893
1893
1894 if not isinstance(group_id, Optional):
1894 if not isinstance(group_id, Optional):
1895 q = q.filter(Repository.group_id == group_id)
1895 q = q.filter(Repository.group_id == group_id)
1896
1896
1897 if case_insensitive:
1897 if case_insensitive:
1898 q = q.order_by(func.lower(Repository.repo_name))
1898 q = q.order_by(func.lower(Repository.repo_name))
1899 else:
1899 else:
1900 q = q.order_by(Repository.repo_name)
1900 q = q.order_by(Repository.repo_name)
1901
1901
1902 return q.all()
1902 return q.all()
1903
1903
1904 @property
1904 @property
1905 def repo_uid(self):
1905 def repo_uid(self):
1906 return '_{}'.format(self.repo_id)
1906 return '_{}'.format(self.repo_id)
1907
1907
1908 @property
1908 @property
1909 def forks(self):
1909 def forks(self):
1910 """
1910 """
1911 Return forks of this repo
1911 Return forks of this repo
1912 """
1912 """
1913 return Repository.get_repo_forks(self.repo_id)
1913 return Repository.get_repo_forks(self.repo_id)
1914
1914
1915 @property
1915 @property
1916 def parent(self):
1916 def parent(self):
1917 """
1917 """
1918 Returns fork parent
1918 Returns fork parent
1919 """
1919 """
1920 return self.fork
1920 return self.fork
1921
1921
1922 @property
1922 @property
1923 def just_name(self):
1923 def just_name(self):
1924 return self.repo_name.split(self.NAME_SEP)[-1]
1924 return self.repo_name.split(self.NAME_SEP)[-1]
1925
1925
1926 @property
1926 @property
1927 def groups_with_parents(self):
1927 def groups_with_parents(self):
1928 groups = []
1928 groups = []
1929 if self.group is None:
1929 if self.group is None:
1930 return groups
1930 return groups
1931
1931
1932 cur_gr = self.group
1932 cur_gr = self.group
1933 groups.insert(0, cur_gr)
1933 groups.insert(0, cur_gr)
1934 while 1:
1934 while 1:
1935 gr = getattr(cur_gr, 'parent_group', None)
1935 gr = getattr(cur_gr, 'parent_group', None)
1936 cur_gr = cur_gr.parent_group
1936 cur_gr = cur_gr.parent_group
1937 if gr is None:
1937 if gr is None:
1938 break
1938 break
1939 groups.insert(0, gr)
1939 groups.insert(0, gr)
1940
1940
1941 return groups
1941 return groups
1942
1942
1943 @property
1943 @property
1944 def groups_and_repo(self):
1944 def groups_and_repo(self):
1945 return self.groups_with_parents, self
1945 return self.groups_with_parents, self
1946
1946
1947 @LazyProperty
1947 @LazyProperty
1948 def repo_path(self):
1948 def repo_path(self):
1949 """
1949 """
1950 Returns base full path for that repository means where it actually
1950 Returns base full path for that repository means where it actually
1951 exists on a filesystem
1951 exists on a filesystem
1952 """
1952 """
1953 q = Session().query(RhodeCodeUi).filter(
1953 q = Session().query(RhodeCodeUi).filter(
1954 RhodeCodeUi.ui_key == self.NAME_SEP)
1954 RhodeCodeUi.ui_key == self.NAME_SEP)
1955 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1955 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1956 return q.one().ui_value
1956 return q.one().ui_value
1957
1957
1958 @property
1958 @property
1959 def repo_full_path(self):
1959 def repo_full_path(self):
1960 p = [self.repo_path]
1960 p = [self.repo_path]
1961 # we need to split the name by / since this is how we store the
1961 # we need to split the name by / since this is how we store the
1962 # names in the database, but that eventually needs to be converted
1962 # names in the database, but that eventually needs to be converted
1963 # into a valid system path
1963 # into a valid system path
1964 p += self.repo_name.split(self.NAME_SEP)
1964 p += self.repo_name.split(self.NAME_SEP)
1965 return os.path.join(*map(safe_unicode, p))
1965 return os.path.join(*map(safe_unicode, p))
1966
1966
1967 @property
1967 @property
1968 def cache_keys(self):
1968 def cache_keys(self):
1969 """
1969 """
1970 Returns associated cache keys for that repo
1970 Returns associated cache keys for that repo
1971 """
1971 """
1972 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1972 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1973 repo_id=self.repo_id)
1973 repo_id=self.repo_id)
1974 return CacheKey.query()\
1974 return CacheKey.query()\
1975 .filter(CacheKey.cache_args == invalidation_namespace)\
1975 .filter(CacheKey.cache_args == invalidation_namespace)\
1976 .order_by(CacheKey.cache_key)\
1976 .order_by(CacheKey.cache_key)\
1977 .all()
1977 .all()
1978
1978
1979 @property
1979 @property
1980 def cached_diffs_relative_dir(self):
1980 def cached_diffs_relative_dir(self):
1981 """
1981 """
1982 Return a relative to the repository store path of cached diffs
1982 Return a relative to the repository store path of cached diffs
1983 used for safe display for users, who shouldn't know the absolute store
1983 used for safe display for users, who shouldn't know the absolute store
1984 path
1984 path
1985 """
1985 """
1986 return os.path.join(
1986 return os.path.join(
1987 os.path.dirname(self.repo_name),
1987 os.path.dirname(self.repo_name),
1988 self.cached_diffs_dir.split(os.path.sep)[-1])
1988 self.cached_diffs_dir.split(os.path.sep)[-1])
1989
1989
1990 @property
1990 @property
1991 def cached_diffs_dir(self):
1991 def cached_diffs_dir(self):
1992 path = self.repo_full_path
1992 path = self.repo_full_path
1993 return os.path.join(
1993 return os.path.join(
1994 os.path.dirname(path),
1994 os.path.dirname(path),
1995 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1995 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1996
1996
1997 def cached_diffs(self):
1997 def cached_diffs(self):
1998 diff_cache_dir = self.cached_diffs_dir
1998 diff_cache_dir = self.cached_diffs_dir
1999 if os.path.isdir(diff_cache_dir):
1999 if os.path.isdir(diff_cache_dir):
2000 return os.listdir(diff_cache_dir)
2000 return os.listdir(diff_cache_dir)
2001 return []
2001 return []
2002
2002
2003 def shadow_repos(self):
2003 def shadow_repos(self):
2004 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
2004 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
2005 return [
2005 return [
2006 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2006 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2007 if x.startswith(shadow_repos_pattern)]
2007 if x.startswith(shadow_repos_pattern)]
2008
2008
2009 def get_new_name(self, repo_name):
2009 def get_new_name(self, repo_name):
2010 """
2010 """
2011 returns new full repository name based on assigned group and new new
2011 returns new full repository name based on assigned group and new new
2012
2012
2013 :param group_name:
2013 :param group_name:
2014 """
2014 """
2015 path_prefix = self.group.full_path_splitted if self.group else []
2015 path_prefix = self.group.full_path_splitted if self.group else []
2016 return self.NAME_SEP.join(path_prefix + [repo_name])
2016 return self.NAME_SEP.join(path_prefix + [repo_name])
2017
2017
2018 @property
2018 @property
2019 def _config(self):
2019 def _config(self):
2020 """
2020 """
2021 Returns db based config object.
2021 Returns db based config object.
2022 """
2022 """
2023 from rhodecode.lib.utils import make_db_config
2023 from rhodecode.lib.utils import make_db_config
2024 return make_db_config(clear_session=False, repo=self)
2024 return make_db_config(clear_session=False, repo=self)
2025
2025
2026 def permissions(self, with_admins=True, with_owner=True,
2026 def permissions(self, with_admins=True, with_owner=True,
2027 expand_from_user_groups=False):
2027 expand_from_user_groups=False):
2028 """
2028 """
2029 Permissions for repositories
2029 Permissions for repositories
2030 """
2030 """
2031 _admin_perm = 'repository.admin'
2031 _admin_perm = 'repository.admin'
2032
2032
2033 owner_row = []
2033 owner_row = []
2034 if with_owner:
2034 if with_owner:
2035 usr = AttributeDict(self.user.get_dict())
2035 usr = AttributeDict(self.user.get_dict())
2036 usr.owner_row = True
2036 usr.owner_row = True
2037 usr.permission = _admin_perm
2037 usr.permission = _admin_perm
2038 usr.permission_id = None
2038 usr.permission_id = None
2039 owner_row.append(usr)
2039 owner_row.append(usr)
2040
2040
2041 super_admin_ids = []
2041 super_admin_ids = []
2042 super_admin_rows = []
2042 super_admin_rows = []
2043 if with_admins:
2043 if with_admins:
2044 for usr in User.get_all_super_admins():
2044 for usr in User.get_all_super_admins():
2045 super_admin_ids.append(usr.user_id)
2045 super_admin_ids.append(usr.user_id)
2046 # if this admin is also owner, don't double the record
2046 # if this admin is also owner, don't double the record
2047 if usr.user_id == owner_row[0].user_id:
2047 if usr.user_id == owner_row[0].user_id:
2048 owner_row[0].admin_row = True
2048 owner_row[0].admin_row = True
2049 else:
2049 else:
2050 usr = AttributeDict(usr.get_dict())
2050 usr = AttributeDict(usr.get_dict())
2051 usr.admin_row = True
2051 usr.admin_row = True
2052 usr.permission = _admin_perm
2052 usr.permission = _admin_perm
2053 usr.permission_id = None
2053 usr.permission_id = None
2054 super_admin_rows.append(usr)
2054 super_admin_rows.append(usr)
2055
2055
2056 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2056 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2057 q = q.options(joinedload(UserRepoToPerm.repository),
2057 q = q.options(joinedload(UserRepoToPerm.repository),
2058 joinedload(UserRepoToPerm.user),
2058 joinedload(UserRepoToPerm.user),
2059 joinedload(UserRepoToPerm.permission),)
2059 joinedload(UserRepoToPerm.permission),)
2060
2060
2061 # get owners and admins and permissions. We do a trick of re-writing
2061 # get owners and admins and permissions. We do a trick of re-writing
2062 # objects from sqlalchemy to named-tuples due to sqlalchemy session
2062 # objects from sqlalchemy to named-tuples due to sqlalchemy session
2063 # has a global reference and changing one object propagates to all
2063 # has a global reference and changing one object propagates to all
2064 # others. This means if admin is also an owner admin_row that change
2064 # others. This means if admin is also an owner admin_row that change
2065 # would propagate to both objects
2065 # would propagate to both objects
2066 perm_rows = []
2066 perm_rows = []
2067 for _usr in q.all():
2067 for _usr in q.all():
2068 usr = AttributeDict(_usr.user.get_dict())
2068 usr = AttributeDict(_usr.user.get_dict())
2069 # if this user is also owner/admin, mark as duplicate record
2069 # if this user is also owner/admin, mark as duplicate record
2070 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2070 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2071 usr.duplicate_perm = True
2071 usr.duplicate_perm = True
2072 # also check if this permission is maybe used by branch_permissions
2072 # also check if this permission is maybe used by branch_permissions
2073 if _usr.branch_perm_entry:
2073 if _usr.branch_perm_entry:
2074 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2074 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2075
2075
2076 usr.permission = _usr.permission.permission_name
2076 usr.permission = _usr.permission.permission_name
2077 usr.permission_id = _usr.repo_to_perm_id
2077 usr.permission_id = _usr.repo_to_perm_id
2078 perm_rows.append(usr)
2078 perm_rows.append(usr)
2079
2079
2080 # filter the perm rows by 'default' first and then sort them by
2080 # filter the perm rows by 'default' first and then sort them by
2081 # admin,write,read,none permissions sorted again alphabetically in
2081 # admin,write,read,none permissions sorted again alphabetically in
2082 # each group
2082 # each group
2083 perm_rows = sorted(perm_rows, key=display_user_sort)
2083 perm_rows = sorted(perm_rows, key=display_user_sort)
2084
2084
2085 user_groups_rows = []
2085 user_groups_rows = []
2086 if expand_from_user_groups:
2086 if expand_from_user_groups:
2087 for ug in self.permission_user_groups(with_members=True):
2087 for ug in self.permission_user_groups(with_members=True):
2088 for user_data in ug.members:
2088 for user_data in ug.members:
2089 user_groups_rows.append(user_data)
2089 user_groups_rows.append(user_data)
2090
2090
2091 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2091 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2092
2092
2093 def permission_user_groups(self, with_members=True):
2093 def permission_user_groups(self, with_members=True):
2094 q = UserGroupRepoToPerm.query()\
2094 q = UserGroupRepoToPerm.query()\
2095 .filter(UserGroupRepoToPerm.repository == self)
2095 .filter(UserGroupRepoToPerm.repository == self)
2096 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2096 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2097 joinedload(UserGroupRepoToPerm.users_group),
2097 joinedload(UserGroupRepoToPerm.users_group),
2098 joinedload(UserGroupRepoToPerm.permission),)
2098 joinedload(UserGroupRepoToPerm.permission),)
2099
2099
2100 perm_rows = []
2100 perm_rows = []
2101 for _user_group in q.all():
2101 for _user_group in q.all():
2102 entry = AttributeDict(_user_group.users_group.get_dict())
2102 entry = AttributeDict(_user_group.users_group.get_dict())
2103 entry.permission = _user_group.permission.permission_name
2103 entry.permission = _user_group.permission.permission_name
2104 if with_members:
2104 if with_members:
2105 entry.members = [x.user.get_dict()
2105 entry.members = [x.user.get_dict()
2106 for x in _user_group.users_group.members]
2106 for x in _user_group.users_group.members]
2107 perm_rows.append(entry)
2107 perm_rows.append(entry)
2108
2108
2109 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2109 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2110 return perm_rows
2110 return perm_rows
2111
2111
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data.

        :param include_secrets: See :meth:`User.get_api_data`; passed through
            to the lock-owner's user data.
        :return: dict of repository attributes, plus any custom repository
            fields when the ``rhodecode_repository_fields`` setting is on.
        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # lock info is a (user_id, timestamp, reason) triple; all None if unlocked
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            # expose custom extra fields under their prefixed keys
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2160
2160
2161 @classmethod
2161 @classmethod
2162 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2162 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2163 if not lock_time:
2163 if not lock_time:
2164 lock_time = time.time()
2164 lock_time = time.time()
2165 if not lock_reason:
2165 if not lock_reason:
2166 lock_reason = cls.LOCK_AUTOMATIC
2166 lock_reason = cls.LOCK_AUTOMATIC
2167 repo.locked = [user_id, lock_time, lock_reason]
2167 repo.locked = [user_id, lock_time, lock_reason]
2168 Session().add(repo)
2168 Session().add(repo)
2169 Session().commit()
2169 Session().commit()
2170
2170
2171 @classmethod
2171 @classmethod
2172 def unlock(cls, repo):
2172 def unlock(cls, repo):
2173 repo.locked = None
2173 repo.locked = None
2174 Session().add(repo)
2174 Session().add(repo)
2175 Session().commit()
2175 Session().commit()
2176
2176
2177 @classmethod
2177 @classmethod
2178 def getlock(cls, repo):
2178 def getlock(cls, repo):
2179 return repo.locked
2179 return repo.locked
2180
2180
2181 def is_user_lock(self, user_id):
2181 def is_user_lock(self, user_id):
2182 if self.lock[0]:
2182 if self.lock[0]:
2183 lock_user_id = safe_int(self.lock[0])
2183 lock_user_id = safe_int(self.lock[0])
2184 user_id = safe_int(user_id)
2184 user_id = safe_int(user_id)
2185 # both are ints, and they are equal
2185 # both are ints, and they are equal
2186 return all([lock_user_id, user_id]) and lock_user_id == user_id
2186 return all([lock_user_id, user_id]) and lock_user_id == user_id
2187
2187
2188 return False
2188 return False
2189
2189
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: either 'push' or 'pull'; anything else raises ValueError
        :param user_id: id of the user performing the action
        :param only_when_enabled: when True, only evaluate locking for repos
            with ``enable_locking`` set
        :return: (make_lock, currently_locked, lock_info) triple
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state (None/True/False)
        make_lock = None
        repo = self
        user = User.get(user_id)

        # lock_info is a (user_id, timestamp, reason) triple, all None if unlocked
        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    # NOTE(review): this logs the *requesting* user, not the
                    # lock owner — confirm intent before changing
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    # pull with locking enabled acquires the lock for the puller
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            # NOTE(review): the log text below says 'read' but the check above
            # is for write/admin permission
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
2252
2252
2253 @property
2253 @property
2254 def last_commit_cache_update_diff(self):
2254 def last_commit_cache_update_diff(self):
2255 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2255 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2256
2256
2257 @classmethod
2257 @classmethod
2258 def _load_commit_change(cls, last_commit_cache):
2258 def _load_commit_change(cls, last_commit_cache):
2259 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2259 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2260 empty_date = datetime.datetime.fromtimestamp(0)
2260 empty_date = datetime.datetime.fromtimestamp(0)
2261 date_latest = last_commit_cache.get('date', empty_date)
2261 date_latest = last_commit_cache.get('date', empty_date)
2262 try:
2262 try:
2263 return parse_datetime(date_latest)
2263 return parse_datetime(date_latest)
2264 except Exception:
2264 except Exception:
2265 return empty_date
2265 return empty_date
2266
2266
2267 @property
2267 @property
2268 def last_commit_change(self):
2268 def last_commit_change(self):
2269 return self._load_commit_change(self.changeset_cache)
2269 return self._load_commit_change(self.changeset_cache)
2270
2270
2271 @property
2271 @property
2272 def last_db_change(self):
2272 def last_db_change(self):
2273 return self.updated_on
2273 return self.updated_on
2274
2274
2275 @property
2275 @property
2276 def clone_uri_hidden(self):
2276 def clone_uri_hidden(self):
2277 clone_uri = self.clone_uri
2277 clone_uri = self.clone_uri
2278 if clone_uri:
2278 if clone_uri:
2279 import urlobject
2279 import urlobject
2280 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2280 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2281 if url_obj.password:
2281 if url_obj.password:
2282 clone_uri = url_obj.with_password('*****')
2282 clone_uri = url_obj.with_password('*****')
2283 return clone_uri
2283 return clone_uri
2284
2284
2285 @property
2285 @property
2286 def push_uri_hidden(self):
2286 def push_uri_hidden(self):
2287 push_uri = self.push_uri
2287 push_uri = self.push_uri
2288 if push_uri:
2288 if push_uri:
2289 import urlobject
2289 import urlobject
2290 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2290 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2291 if url_obj.password:
2291 if url_obj.password:
2292 push_uri = url_obj.with_password('*****')
2292 push_uri = url_obj.with_password('*****')
2293 return push_uri
2293 return push_uri
2294
2294
    def clone_url(self, **override):
        """
        Build the clone URL for this repository.

        Recognized keys consumed from ``override`` (removed before the
        remaining kwargs are forwarded to ``get_clone_url``):

        - ``with_id``: use the id-based clone URI template
        - ``uri_tmpl``: use an explicit template (wins over ``with_id``)
        - ``ssh``: use the SSH clone URI template/settings
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        request = get_current_request()
        if not uri_tmpl:
            # prefer per-request cached settings when available
            if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
                rc_config = request.call_context.rc_config
            else:
                rc_config = SettingsModel().get_all_settings(cache=True)

            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH

            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id,
                             repo_type=self.repo_type,
                             **override)
2334
2334
2335 def set_state(self, state):
2335 def set_state(self, state):
2336 self.repo_state = state
2336 self.repo_state = state
2337 Session().add(self)
2337 Session().add(self)
2338 #==========================================================================
2338 #==========================================================================
2339 # SCM PROPERTIES
2339 # SCM PROPERTIES
2340 #==========================================================================
2340 #==========================================================================
2341
2341
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False):
        """
        Return a single commit of this repository, via get_commit_safe.

        :param commit_id: raw commit hash to look up
        :param commit_idx: numeric commit index (alternative to commit_id)
        :param pre_load: list of attribute names to eagerly load
        :param maybe_unreachable: also resolve commits not reachable from any
            reference — e.g. GIT commits referenced only by a pull request
            after a failed merge check
        """
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable)
2345
2346
2346 def get_changeset(self, rev=None, pre_load=None):
2347 def get_changeset(self, rev=None, pre_load=None):
2347 warnings.warn("Use get_commit", DeprecationWarning)
2348 warnings.warn("Use get_commit", DeprecationWarning)
2348 commit_id = None
2349 commit_id = None
2349 commit_idx = None
2350 commit_idx = None
2350 if isinstance(rev, compat.string_types):
2351 if isinstance(rev, compat.string_types):
2351 commit_id = rev
2352 commit_id = rev
2352 else:
2353 else:
2353 commit_idx = rev
2354 commit_idx = rev
2354 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2355 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2355 pre_load=pre_load)
2356 pre_load=pre_load)
2356
2357
2357 def get_landing_commit(self):
2358 def get_landing_commit(self):
2358 """
2359 """
2359 Returns landing commit, or if that doesn't exist returns the tip
2360 Returns landing commit, or if that doesn't exist returns the tip
2360 """
2361 """
2361 _rev_type, _rev = self.landing_rev
2362 _rev_type, _rev = self.landing_rev
2362 commit = self.get_commit(_rev)
2363 commit = self.get_commit(_rev)
2363 if isinstance(commit, EmptyCommit):
2364 if isinstance(commit, EmptyCommit):
2364 return self.get_commit()
2365 return self.get_commit()
2365 return commit
2366 return commit
2366
2367
2367 def flush_commit_cache(self):
2368 def flush_commit_cache(self):
2368 self.update_commit_cache(cs_cache={'raw_id':'0'})
2369 self.update_commit_cache(cs_cache={'raw_id':'0'})
2369 self.update_commit_cache()
2370 self.update_commit_cache()
2370
2371
2371 def update_commit_cache(self, cs_cache=None, config=None):
2372 def update_commit_cache(self, cs_cache=None, config=None):
2372 """
2373 """
2373 Update cache of last commit for repository
2374 Update cache of last commit for repository
2374 cache_keys should be::
2375 cache_keys should be::
2375
2376
2376 source_repo_id
2377 source_repo_id
2377 short_id
2378 short_id
2378 raw_id
2379 raw_id
2379 revision
2380 revision
2380 parents
2381 parents
2381 message
2382 message
2382 date
2383 date
2383 author
2384 author
2384 updated_on
2385 updated_on
2385
2386
2386 """
2387 """
2387 from rhodecode.lib.vcs.backends.base import BaseChangeset
2388 from rhodecode.lib.vcs.backends.base import BaseChangeset
2388 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2389 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2389 empty_date = datetime.datetime.fromtimestamp(0)
2390 empty_date = datetime.datetime.fromtimestamp(0)
2390
2391
2391 if cs_cache is None:
2392 if cs_cache is None:
2392 # use no-cache version here
2393 # use no-cache version here
2393 try:
2394 try:
2394 scm_repo = self.scm_instance(cache=False, config=config)
2395 scm_repo = self.scm_instance(cache=False, config=config)
2395 except VCSError:
2396 except VCSError:
2396 scm_repo = None
2397 scm_repo = None
2397 empty = scm_repo is None or scm_repo.is_empty()
2398 empty = scm_repo is None or scm_repo.is_empty()
2398
2399
2399 if not empty:
2400 if not empty:
2400 cs_cache = scm_repo.get_commit(
2401 cs_cache = scm_repo.get_commit(
2401 pre_load=["author", "date", "message", "parents", "branch"])
2402 pre_load=["author", "date", "message", "parents", "branch"])
2402 else:
2403 else:
2403 cs_cache = EmptyCommit()
2404 cs_cache = EmptyCommit()
2404
2405
2405 if isinstance(cs_cache, BaseChangeset):
2406 if isinstance(cs_cache, BaseChangeset):
2406 cs_cache = cs_cache.__json__()
2407 cs_cache = cs_cache.__json__()
2407
2408
2408 def is_outdated(new_cs_cache):
2409 def is_outdated(new_cs_cache):
2409 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2410 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2410 new_cs_cache['revision'] != self.changeset_cache['revision']):
2411 new_cs_cache['revision'] != self.changeset_cache['revision']):
2411 return True
2412 return True
2412 return False
2413 return False
2413
2414
2414 # check if we have maybe already latest cached revision
2415 # check if we have maybe already latest cached revision
2415 if is_outdated(cs_cache) or not self.changeset_cache:
2416 if is_outdated(cs_cache) or not self.changeset_cache:
2416 _current_datetime = datetime.datetime.utcnow()
2417 _current_datetime = datetime.datetime.utcnow()
2417 last_change = cs_cache.get('date') or _current_datetime
2418 last_change = cs_cache.get('date') or _current_datetime
2418 # we check if last update is newer than the new value
2419 # we check if last update is newer than the new value
2419 # if yes, we use the current timestamp instead. Imagine you get
2420 # if yes, we use the current timestamp instead. Imagine you get
2420 # old commit pushed 1y ago, we'd set last update 1y to ago.
2421 # old commit pushed 1y ago, we'd set last update 1y to ago.
2421 last_change_timestamp = datetime_to_time(last_change)
2422 last_change_timestamp = datetime_to_time(last_change)
2422 current_timestamp = datetime_to_time(last_change)
2423 current_timestamp = datetime_to_time(last_change)
2423 if last_change_timestamp > current_timestamp and not empty:
2424 if last_change_timestamp > current_timestamp and not empty:
2424 cs_cache['date'] = _current_datetime
2425 cs_cache['date'] = _current_datetime
2425
2426
2426 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2427 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2427 cs_cache['updated_on'] = time.time()
2428 cs_cache['updated_on'] = time.time()
2428 self.changeset_cache = cs_cache
2429 self.changeset_cache = cs_cache
2429 self.updated_on = last_change
2430 self.updated_on = last_change
2430 Session().add(self)
2431 Session().add(self)
2431 Session().commit()
2432 Session().commit()
2432
2433
2433 else:
2434 else:
2434 if empty:
2435 if empty:
2435 cs_cache = EmptyCommit().__json__()
2436 cs_cache = EmptyCommit().__json__()
2436 else:
2437 else:
2437 cs_cache = self.changeset_cache
2438 cs_cache = self.changeset_cache
2438
2439
2439 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2440 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2440
2441
2441 cs_cache['updated_on'] = time.time()
2442 cs_cache['updated_on'] = time.time()
2442 self.changeset_cache = cs_cache
2443 self.changeset_cache = cs_cache
2443 self.updated_on = _date_latest
2444 self.updated_on = _date_latest
2444 Session().add(self)
2445 Session().add(self)
2445 Session().commit()
2446 Session().commit()
2446
2447
2447 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2448 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2448 self.repo_name, cs_cache, _date_latest)
2449 self.repo_name, cs_cache, _date_latest)
2449
2450
2450 @property
2451 @property
2451 def tip(self):
2452 def tip(self):
2452 return self.get_commit('tip')
2453 return self.get_commit('tip')
2453
2454
2454 @property
2455 @property
2455 def author(self):
2456 def author(self):
2456 return self.tip.author
2457 return self.tip.author
2457
2458
2458 @property
2459 @property
2459 def last_change(self):
2460 def last_change(self):
2460 return self.scm_instance().last_change
2461 return self.scm_instance().last_change
2461
2462
2462 def get_comments(self, revisions=None):
2463 def get_comments(self, revisions=None):
2463 """
2464 """
2464 Returns comments for this repository grouped by revisions
2465 Returns comments for this repository grouped by revisions
2465
2466
2466 :param revisions: filter query by revisions only
2467 :param revisions: filter query by revisions only
2467 """
2468 """
2468 cmts = ChangesetComment.query()\
2469 cmts = ChangesetComment.query()\
2469 .filter(ChangesetComment.repo == self)
2470 .filter(ChangesetComment.repo == self)
2470 if revisions:
2471 if revisions:
2471 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2472 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2472 grouped = collections.defaultdict(list)
2473 grouped = collections.defaultdict(list)
2473 for cmt in cmts.all():
2474 for cmt in cmts.all():
2474 grouped[cmt.revision].append(cmt)
2475 grouped[cmt.revision].append(cmt)
2475 return grouped
2476 return grouped
2476
2477
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository, keyed by revision. Each value is
        a list ``[status, status_label, pull_request_id, pr_target_repo_name]``.

        :param revisions: list of revisions to get statuses for
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            # (SQL backends cap the number of values allowed inside IN (...))
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        # seed every revision of open PRs with an "under review" placeholder
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # stored statuses override the PR placeholders set above -- this loop
        # must therefore run second
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
2518
2519
2519 # ==========================================================================
2520 # ==========================================================================
2520 # SCM CACHE INSTANCE
2521 # SCM CACHE INSTANCE
2521 # ==========================================================================
2522 # ==========================================================================
2522
2523
    def scm_instance(self, **kwargs):
        """
        Return a vcs backend instance for this repository, either a
        long-term cached one or a freshly built one depending on the
        ``cache``/``vcs_full_cache``/``config`` keyword arguments.
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        vcs_full_cache = kwargs.pop('vcs_full_cache', None)
        if vcs_full_cache is not None:
            # allows override global config
            full_cache = vcs_full_cache
        else:
            full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            log.debug('Initializing pure cached instance for %s', self.repo_path)
            return self._get_instance_cached()

        # cache here is sent to the "vcs server"
        return self._get_instance(cache=bool(cache), config=config)
2544
2545
    def _get_instance_cached(self):
        """
        Return a vcs instance from the long-term region cache, rebuilding it
        when the repo's invalidation namespace signals a change.
        """
        from rhodecode.lib import rc_cache

        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id, _cache_state_uid):
            # the args are part of the cache key; only _cache_state_uid is
            # actually consumed when (re)building the instance
            return self._get_instance(repo_state_uid=_cache_state_uid)

        # we must use thread scoped cache here,
        # because each thread of gevent needs it's own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.cache_data['cache_state_uid']
            args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)

            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

            log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
            return instance
2575
2576
2576 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2577 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2577 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2578 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2578 self.repo_type, self.repo_path, cache)
2579 self.repo_type, self.repo_path, cache)
2579 config = config or self._config
2580 config = config or self._config
2580 custom_wire = {
2581 custom_wire = {
2581 'cache': cache, # controls the vcs.remote cache
2582 'cache': cache, # controls the vcs.remote cache
2582 'repo_state_uid': repo_state_uid
2583 'repo_state_uid': repo_state_uid
2583 }
2584 }
2584 repo = get_vcs_instance(
2585 repo = get_vcs_instance(
2585 repo_path=safe_str(self.repo_full_path),
2586 repo_path=safe_str(self.repo_full_path),
2586 config=config,
2587 config=config,
2587 with_wire=custom_wire,
2588 with_wire=custom_wire,
2588 create=False,
2589 create=False,
2589 _vcs_alias=self.repo_type)
2590 _vcs_alias=self.repo_type)
2590 if repo is not None:
2591 if repo is not None:
2591 repo.count() # cache rebuild
2592 repo.count() # cache rebuild
2592 return repo
2593 return repo
2593
2594
2594 def get_shadow_repository_path(self, workspace_id):
2595 def get_shadow_repository_path(self, workspace_id):
2595 from rhodecode.lib.vcs.backends.base import BaseRepository
2596 from rhodecode.lib.vcs.backends.base import BaseRepository
2596 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2597 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2597 self.repo_full_path, self.repo_id, workspace_id)
2598 self.repo_full_path, self.repo_id, workspace_id)
2598 return shadow_repo_path
2599 return shadow_repo_path
2599
2600
2600 def __json__(self):
2601 def __json__(self):
2601 return {'landing_rev': self.landing_rev}
2602 return {'landing_rev': self.landing_rev}
2602
2603
2603 def get_dict(self):
2604 def get_dict(self):
2604
2605
2605 # Since we transformed `repo_name` to a hybrid property, we need to
2606 # Since we transformed `repo_name` to a hybrid property, we need to
2606 # keep compatibility with the code which uses `repo_name` field.
2607 # keep compatibility with the code which uses `repo_name` field.
2607
2608
2608 result = super(Repository, self).get_dict()
2609 result = super(Repository, self).get_dict()
2609 result['repo_name'] = result.pop('_repo_name', None)
2610 result['repo_name'] = result.pop('_repo_name', None)
2610 return result
2611 return result
2611
2612
2612
2613
2613 class RepoGroup(Base, BaseModel):
2614 class RepoGroup(Base, BaseModel):
2614 __tablename__ = 'groups'
2615 __tablename__ = 'groups'
2615 __table_args__ = (
2616 __table_args__ = (
2616 UniqueConstraint('group_name', 'group_parent_id'),
2617 UniqueConstraint('group_name', 'group_parent_id'),
2617 base_table_args,
2618 base_table_args,
2618 )
2619 )
2619 __mapper_args__ = {'order_by': 'group_name'}
2620 __mapper_args__ = {'order_by': 'group_name'}
2620
2621
2621 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2622 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2622
2623
2623 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2624 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2624 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2625 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2625 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2626 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2626 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2627 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2627 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2628 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2628 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2629 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2629 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2630 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2630 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2631 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2631 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2632 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2632 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2633 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2633 _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data
2634 _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data
2634
2635
2635 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2636 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2636 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2637 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2637 parent_group = relationship('RepoGroup', remote_side=group_id)
2638 parent_group = relationship('RepoGroup', remote_side=group_id)
2638 user = relationship('User')
2639 user = relationship('User')
2639 integrations = relationship('Integration', cascade="all, delete-orphan")
2640 integrations = relationship('Integration', cascade="all, delete-orphan")
2640
2641
2641 # no cascade, set NULL
2642 # no cascade, set NULL
2642 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')
2643 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')
2643
2644
    def __init__(self, group_name='', parent_group=None):
        # assigning through the `group_name` hybrid-property setter also
        # computes and stores `group_name_hash`
        self.group_name = group_name
        self.parent_group = parent_group
2647
2648
2648 def __unicode__(self):
2649 def __unicode__(self):
2649 return u"<%s('id:%s:%s')>" % (
2650 return u"<%s('id:%s:%s')>" % (
2650 self.__class__.__name__, self.group_id, self.group_name)
2651 self.__class__.__name__, self.group_id, self.group_name)
2651
2652
    @hybrid_property
    def group_name(self):
        # full (slash-separated) name of this repository group
        return self._group_name
2655
2656
    @group_name.setter
    def group_name(self, value):
        # keep the ascii-safe hash column in sync with the name; see
        # hash_repo_group_name for how the hash is derived
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)
2660
2661
    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """
        Deserialize a raw JSON commit-cache blob; fall back to an
        EmptyCommit-shaped dict when the blob is missing or unreadable.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # round-trip through JSON so the fallback carries the exact same
            # value types as a deserialized cache entry would
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            # raw value was not a string-like object
            return dummy
        except Exception:
            # best-effort: log corrupt cache data instead of failing
            log.error(traceback.format_exc())
            return dummy
2676
2677
    @hybrid_property
    def changeset_cache(self):
        # NOTE: repo groups have no source repo id, hence the empty string
        return self._load_changeset_cache('', self._changeset_cache)
2680
2681
    @changeset_cache.setter
    def changeset_cache(self, val):
        # best-effort write: serialization failures are logged, not raised,
        # so a bad cache value never breaks the surrounding transaction
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
2687
2688
    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        if self.group_id and val:
            # NOTE(review): `assert` is stripped under `python -O`; if this
            # guard must always run, an explicit raise would be safer.
            assert val != self.group_id

        return val
2697
2698
2698 @hybrid_property
2699 @hybrid_property
2699 def description_safe(self):
2700 def description_safe(self):
2700 from rhodecode.lib import helpers as h
2701 from rhodecode.lib import helpers as h
2701 return h.escape(self.group_description)
2702 return h.escape(self.group_description)
2702
2703
2703 @classmethod
2704 @classmethod
2704 def hash_repo_group_name(cls, repo_group_name):
2705 def hash_repo_group_name(cls, repo_group_name):
2705 val = remove_formatting(repo_group_name)
2706 val = remove_formatting(repo_group_name)
2706 val = safe_str(val).lower()
2707 val = safe_str(val).lower()
2707 chars = []
2708 chars = []
2708 for c in val:
2709 for c in val:
2709 if c not in string.ascii_letters:
2710 if c not in string.ascii_letters:
2710 c = str(ord(c))
2711 c = str(ord(c))
2711 chars.append(c)
2712 chars.append(c)
2712
2713
2713 return ''.join(chars)
2714 return ''.join(chars)
2714
2715
2715 @classmethod
2716 @classmethod
2716 def _generate_choice(cls, repo_group):
2717 def _generate_choice(cls, repo_group):
2717 from webhelpers2.html import literal as _literal
2718 from webhelpers2.html import literal as _literal
2718 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2719 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2719 return repo_group.group_id, _name(repo_group.full_path_splitted)
2720 return repo_group.group_id, _name(repo_group.full_path_splitted)
2720
2721
2721 @classmethod
2722 @classmethod
2722 def groups_choices(cls, groups=None, show_empty_group=True):
2723 def groups_choices(cls, groups=None, show_empty_group=True):
2723 if not groups:
2724 if not groups:
2724 groups = cls.query().all()
2725 groups = cls.query().all()
2725
2726
2726 repo_groups = []
2727 repo_groups = []
2727 if show_empty_group:
2728 if show_empty_group:
2728 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2729 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2729
2730
2730 repo_groups.extend([cls._generate_choice(x) for x in groups])
2731 repo_groups.extend([cls._generate_choice(x) for x in groups])
2731
2732
2732 repo_groups = sorted(
2733 repo_groups = sorted(
2733 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2734 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2734 return repo_groups
2735 return repo_groups
2735
2736
    @classmethod
    def url_sep(cls):
        # separator used in group paths/URLs
        return URL_SEP
2739
2740
2740 @classmethod
2741 @classmethod
2741 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2742 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2742 if case_insensitive:
2743 if case_insensitive:
2743 gr = cls.query().filter(func.lower(cls.group_name)
2744 gr = cls.query().filter(func.lower(cls.group_name)
2744 == func.lower(group_name))
2745 == func.lower(group_name))
2745 else:
2746 else:
2746 gr = cls.query().filter(cls.group_name == group_name)
2747 gr = cls.query().filter(cls.group_name == group_name)
2747 if cache:
2748 if cache:
2748 name_key = _hash_key(group_name)
2749 name_key = _hash_key(group_name)
2749 gr = gr.options(
2750 gr = gr.options(
2750 FromCache("sql_cache_short", "get_group_%s" % name_key))
2751 FromCache("sql_cache_short", "get_group_%s" % name_key))
2751 return gr.scalar()
2752 return gr.scalar()
2752
2753
2753 @classmethod
2754 @classmethod
2754 def get_user_personal_repo_group(cls, user_id):
2755 def get_user_personal_repo_group(cls, user_id):
2755 user = User.get(user_id)
2756 user = User.get(user_id)
2756 if user.username == User.DEFAULT_USER:
2757 if user.username == User.DEFAULT_USER:
2757 return None
2758 return None
2758
2759
2759 return cls.query()\
2760 return cls.query()\
2760 .filter(cls.personal == true()) \
2761 .filter(cls.personal == true()) \
2761 .filter(cls.user == user) \
2762 .filter(cls.user == user) \
2762 .order_by(cls.group_id.asc()) \
2763 .order_by(cls.group_id.asc()) \
2763 .first()
2764 .first()
2764
2765
2765 @classmethod
2766 @classmethod
2766 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2767 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2767 case_insensitive=True):
2768 case_insensitive=True):
2768 q = RepoGroup.query()
2769 q = RepoGroup.query()
2769
2770
2770 if not isinstance(user_id, Optional):
2771 if not isinstance(user_id, Optional):
2771 q = q.filter(RepoGroup.user_id == user_id)
2772 q = q.filter(RepoGroup.user_id == user_id)
2772
2773
2773 if not isinstance(group_id, Optional):
2774 if not isinstance(group_id, Optional):
2774 q = q.filter(RepoGroup.group_parent_id == group_id)
2775 q = q.filter(RepoGroup.group_parent_id == group_id)
2775
2776
2776 if case_insensitive:
2777 if case_insensitive:
2777 q = q.order_by(func.lower(RepoGroup.group_name))
2778 q = q.order_by(func.lower(RepoGroup.group_name))
2778 else:
2779 else:
2779 q = q.order_by(RepoGroup.group_name)
2780 q = q.order_by(RepoGroup.group_name)
2780 return q.all()
2781 return q.all()
2781
2782
    @property
    def parents(self, parents_recursion_limit=10):
        """
        List of parent groups, outermost ancestor first.

        NOTE(review): properties cannot receive arguments, so
        ``parents_recursion_limit`` always has its default value of 10.
        """
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups
2804
2805
2805 @property
2806 @property
2806 def last_commit_cache_update_diff(self):
2807 def last_commit_cache_update_diff(self):
2807 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2808 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2808
2809
2809 @classmethod
2810 @classmethod
2810 def _load_commit_change(cls, last_commit_cache):
2811 def _load_commit_change(cls, last_commit_cache):
2811 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2812 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2812 empty_date = datetime.datetime.fromtimestamp(0)
2813 empty_date = datetime.datetime.fromtimestamp(0)
2813 date_latest = last_commit_cache.get('date', empty_date)
2814 date_latest = last_commit_cache.get('date', empty_date)
2814 try:
2815 try:
2815 return parse_datetime(date_latest)
2816 return parse_datetime(date_latest)
2816 except Exception:
2817 except Exception:
2817 return empty_date
2818 return empty_date
2818
2819
    @property
    def last_commit_change(self):
        # datetime of the last cached commit in this group
        return self._load_commit_change(self.changeset_cache)
2822
2823
    @property
    def last_db_change(self):
        # datetime of the last database modification of this group row
        return self.updated_on
2826
2827
    @property
    def children(self):
        # query (not a list) of the direct child groups of this group
        return RepoGroup.query().filter(RepoGroup.parent_group == self)
2830
2831
2831 @property
2832 @property
2832 def name(self):
2833 def name(self):
2833 return self.group_name.split(RepoGroup.url_sep())[-1]
2834 return self.group_name.split(RepoGroup.url_sep())[-1]
2834
2835
    @property
    def full_path(self):
        # the group name already is the full path
        return self.group_name
2838
2839
2839 @property
2840 @property
2840 def full_path_splitted(self):
2841 def full_path_splitted(self):
2841 return self.group_name.split(RepoGroup.url_sep())
2842 return self.group_name.split(RepoGroup.url_sep())
2842
2843
2843 @property
2844 @property
2844 def repositories(self):
2845 def repositories(self):
2845 return Repository.query()\
2846 return Repository.query()\
2846 .filter(Repository.group == self)\
2847 .filter(Repository.group == self)\
2847 .order_by(Repository.repo_name)
2848 .order_by(Repository.repo_name)
2848
2849
2849 @property
2850 @property
2850 def repositories_recursive_count(self):
2851 def repositories_recursive_count(self):
2851 cnt = self.repositories.count()
2852 cnt = self.repositories.count()
2852
2853
2853 def children_count(group):
2854 def children_count(group):
2854 cnt = 0
2855 cnt = 0
2855 for child in group.children:
2856 for child in group.children:
2856 cnt += child.repositories.count()
2857 cnt += child.repositories.count()
2857 cnt += children_count(child)
2858 cnt += children_count(child)
2858 return cnt
2859 return cnt
2859
2860
2860 return cnt + children_count(self)
2861 return cnt + children_count(self)
2861
2862
2862 def _recursive_objects(self, include_repos=True, include_groups=True):
2863 def _recursive_objects(self, include_repos=True, include_groups=True):
2863 all_ = []
2864 all_ = []
2864
2865
2865 def _get_members(root_gr):
2866 def _get_members(root_gr):
2866 if include_repos:
2867 if include_repos:
2867 for r in root_gr.repositories:
2868 for r in root_gr.repositories:
2868 all_.append(r)
2869 all_.append(r)
2869 childs = root_gr.children.all()
2870 childs = root_gr.children.all()
2870 if childs:
2871 if childs:
2871 for gr in childs:
2872 for gr in childs:
2872 if include_groups:
2873 if include_groups:
2873 all_.append(gr)
2874 all_.append(gr)
2874 _get_members(gr)
2875 _get_members(gr)
2875
2876
2876 root_group = []
2877 root_group = []
2877 if include_groups:
2878 if include_groups:
2878 root_group = [self]
2879 root_group = [self]
2879
2880
2880 _get_members(self)
2881 _get_members(self)
2881 return root_group + all_
2882 return root_group + all_
2882
2883
2883 def recursive_groups_and_repos(self):
2884 def recursive_groups_and_repos(self):
2884 """
2885 """
2885 Recursive return all groups, with repositories in those groups
2886 Recursive return all groups, with repositories in those groups
2886 """
2887 """
2887 return self._recursive_objects()
2888 return self._recursive_objects()
2888
2889
2889 def recursive_groups(self):
2890 def recursive_groups(self):
2890 """
2891 """
2891 Returns all children groups for this group including children of children
2892 Returns all children groups for this group including children of children
2892 """
2893 """
2893 return self._recursive_objects(include_repos=False)
2894 return self._recursive_objects(include_repos=False)
2894
2895
2895 def recursive_repos(self):
2896 def recursive_repos(self):
2896 """
2897 """
2897 Returns all children repositories for this group
2898 Returns all children repositories for this group
2898 """
2899 """
2899 return self._recursive_objects(include_groups=False)
2900 return self._recursive_objects(include_groups=False)
2900
2901
2901 def get_new_name(self, group_name):
2902 def get_new_name(self, group_name):
2902 """
2903 """
2903 returns new full group name based on parent and new name
2904 returns new full group name based on parent and new name
2904
2905
2905 :param group_name:
2906 :param group_name:
2906 """
2907 """
2907 path_prefix = (self.parent_group.full_path_splitted if
2908 path_prefix = (self.parent_group.full_path_splitted if
2908 self.parent_group else [])
2909 self.parent_group else [])
2909 return RepoGroup.url_sep().join(path_prefix + [group_name])
2910 return RepoGroup.url_sep().join(path_prefix + [group_name])
2910
2911
2911 def update_commit_cache(self, config=None):
2912 def update_commit_cache(self, config=None):
2912 """
2913 """
2913 Update cache of last commit for newest repository inside this repository group.
2914 Update cache of last commit for newest repository inside this repository group.
2914 cache_keys should be::
2915 cache_keys should be::
2915
2916
2916 source_repo_id
2917 source_repo_id
2917 short_id
2918 short_id
2918 raw_id
2919 raw_id
2919 revision
2920 revision
2920 parents
2921 parents
2921 message
2922 message
2922 date
2923 date
2923 author
2924 author
2924
2925
2925 """
2926 """
2926 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2927 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2927 empty_date = datetime.datetime.fromtimestamp(0)
2928 empty_date = datetime.datetime.fromtimestamp(0)
2928
2929
2929 def repo_groups_and_repos(root_gr):
2930 def repo_groups_and_repos(root_gr):
2930 for _repo in root_gr.repositories:
2931 for _repo in root_gr.repositories:
2931 yield _repo
2932 yield _repo
2932 for child_group in root_gr.children.all():
2933 for child_group in root_gr.children.all():
2933 yield child_group
2934 yield child_group
2934
2935
2935 latest_repo_cs_cache = {}
2936 latest_repo_cs_cache = {}
2936 for obj in repo_groups_and_repos(self):
2937 for obj in repo_groups_and_repos(self):
2937 repo_cs_cache = obj.changeset_cache
2938 repo_cs_cache = obj.changeset_cache
2938 date_latest = latest_repo_cs_cache.get('date', empty_date)
2939 date_latest = latest_repo_cs_cache.get('date', empty_date)
2939 date_current = repo_cs_cache.get('date', empty_date)
2940 date_current = repo_cs_cache.get('date', empty_date)
2940 current_timestamp = datetime_to_time(parse_datetime(date_latest))
2941 current_timestamp = datetime_to_time(parse_datetime(date_latest))
2941 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
2942 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
2942 latest_repo_cs_cache = repo_cs_cache
2943 latest_repo_cs_cache = repo_cs_cache
2943 if hasattr(obj, 'repo_id'):
2944 if hasattr(obj, 'repo_id'):
2944 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
2945 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
2945 else:
2946 else:
2946 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
2947 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
2947
2948
2948 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
2949 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
2949
2950
2950 latest_repo_cs_cache['updated_on'] = time.time()
2951 latest_repo_cs_cache['updated_on'] = time.time()
2951 self.changeset_cache = latest_repo_cs_cache
2952 self.changeset_cache = latest_repo_cs_cache
2952 self.updated_on = _date_latest
2953 self.updated_on = _date_latest
2953 Session().add(self)
2954 Session().add(self)
2954 Session().commit()
2955 Session().commit()
2955
2956
2956 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
2957 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
2957 self.group_name, latest_repo_cs_cache, _date_latest)
2958 self.group_name, latest_repo_cs_cache, _date_latest)
2958
2959
2959 def permissions(self, with_admins=True, with_owner=True,
2960 def permissions(self, with_admins=True, with_owner=True,
2960 expand_from_user_groups=False):
2961 expand_from_user_groups=False):
2961 """
2962 """
2962 Permissions for repository groups
2963 Permissions for repository groups
2963 """
2964 """
2964 _admin_perm = 'group.admin'
2965 _admin_perm = 'group.admin'
2965
2966
2966 owner_row = []
2967 owner_row = []
2967 if with_owner:
2968 if with_owner:
2968 usr = AttributeDict(self.user.get_dict())
2969 usr = AttributeDict(self.user.get_dict())
2969 usr.owner_row = True
2970 usr.owner_row = True
2970 usr.permission = _admin_perm
2971 usr.permission = _admin_perm
2971 owner_row.append(usr)
2972 owner_row.append(usr)
2972
2973
2973 super_admin_ids = []
2974 super_admin_ids = []
2974 super_admin_rows = []
2975 super_admin_rows = []
2975 if with_admins:
2976 if with_admins:
2976 for usr in User.get_all_super_admins():
2977 for usr in User.get_all_super_admins():
2977 super_admin_ids.append(usr.user_id)
2978 super_admin_ids.append(usr.user_id)
2978 # if this admin is also owner, don't double the record
2979 # if this admin is also owner, don't double the record
2979 if usr.user_id == owner_row[0].user_id:
2980 if usr.user_id == owner_row[0].user_id:
2980 owner_row[0].admin_row = True
2981 owner_row[0].admin_row = True
2981 else:
2982 else:
2982 usr = AttributeDict(usr.get_dict())
2983 usr = AttributeDict(usr.get_dict())
2983 usr.admin_row = True
2984 usr.admin_row = True
2984 usr.permission = _admin_perm
2985 usr.permission = _admin_perm
2985 super_admin_rows.append(usr)
2986 super_admin_rows.append(usr)
2986
2987
2987 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2988 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2988 q = q.options(joinedload(UserRepoGroupToPerm.group),
2989 q = q.options(joinedload(UserRepoGroupToPerm.group),
2989 joinedload(UserRepoGroupToPerm.user),
2990 joinedload(UserRepoGroupToPerm.user),
2990 joinedload(UserRepoGroupToPerm.permission),)
2991 joinedload(UserRepoGroupToPerm.permission),)
2991
2992
2992 # get owners and admins and permissions. We do a trick of re-writing
2993 # get owners and admins and permissions. We do a trick of re-writing
2993 # objects from sqlalchemy to named-tuples due to sqlalchemy session
2994 # objects from sqlalchemy to named-tuples due to sqlalchemy session
2994 # has a global reference and changing one object propagates to all
2995 # has a global reference and changing one object propagates to all
2995 # others. This means if admin is also an owner admin_row that change
2996 # others. This means if admin is also an owner admin_row that change
2996 # would propagate to both objects
2997 # would propagate to both objects
2997 perm_rows = []
2998 perm_rows = []
2998 for _usr in q.all():
2999 for _usr in q.all():
2999 usr = AttributeDict(_usr.user.get_dict())
3000 usr = AttributeDict(_usr.user.get_dict())
3000 # if this user is also owner/admin, mark as duplicate record
3001 # if this user is also owner/admin, mark as duplicate record
3001 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3002 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3002 usr.duplicate_perm = True
3003 usr.duplicate_perm = True
3003 usr.permission = _usr.permission.permission_name
3004 usr.permission = _usr.permission.permission_name
3004 perm_rows.append(usr)
3005 perm_rows.append(usr)
3005
3006
3006 # filter the perm rows by 'default' first and then sort them by
3007 # filter the perm rows by 'default' first and then sort them by
3007 # admin,write,read,none permissions sorted again alphabetically in
3008 # admin,write,read,none permissions sorted again alphabetically in
3008 # each group
3009 # each group
3009 perm_rows = sorted(perm_rows, key=display_user_sort)
3010 perm_rows = sorted(perm_rows, key=display_user_sort)
3010
3011
3011 user_groups_rows = []
3012 user_groups_rows = []
3012 if expand_from_user_groups:
3013 if expand_from_user_groups:
3013 for ug in self.permission_user_groups(with_members=True):
3014 for ug in self.permission_user_groups(with_members=True):
3014 for user_data in ug.members:
3015 for user_data in ug.members:
3015 user_groups_rows.append(user_data)
3016 user_groups_rows.append(user_data)
3016
3017
3017 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3018 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3018
3019
3019 def permission_user_groups(self, with_members=False):
3020 def permission_user_groups(self, with_members=False):
3020 q = UserGroupRepoGroupToPerm.query()\
3021 q = UserGroupRepoGroupToPerm.query()\
3021 .filter(UserGroupRepoGroupToPerm.group == self)
3022 .filter(UserGroupRepoGroupToPerm.group == self)
3022 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3023 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3023 joinedload(UserGroupRepoGroupToPerm.users_group),
3024 joinedload(UserGroupRepoGroupToPerm.users_group),
3024 joinedload(UserGroupRepoGroupToPerm.permission),)
3025 joinedload(UserGroupRepoGroupToPerm.permission),)
3025
3026
3026 perm_rows = []
3027 perm_rows = []
3027 for _user_group in q.all():
3028 for _user_group in q.all():
3028 entry = AttributeDict(_user_group.users_group.get_dict())
3029 entry = AttributeDict(_user_group.users_group.get_dict())
3029 entry.permission = _user_group.permission.permission_name
3030 entry.permission = _user_group.permission.permission_name
3030 if with_members:
3031 if with_members:
3031 entry.members = [x.user.get_dict()
3032 entry.members = [x.user.get_dict()
3032 for x in _user_group.users_group.members]
3033 for x in _user_group.users_group.members]
3033 perm_rows.append(entry)
3034 perm_rows.append(entry)
3034
3035
3035 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3036 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3036 return perm_rows
3037 return perm_rows
3037
3038
3038 def get_api_data(self):
3039 def get_api_data(self):
3039 """
3040 """
3040 Common function for generating api data
3041 Common function for generating api data
3041
3042
3042 """
3043 """
3043 group = self
3044 group = self
3044 data = {
3045 data = {
3045 'group_id': group.group_id,
3046 'group_id': group.group_id,
3046 'group_name': group.group_name,
3047 'group_name': group.group_name,
3047 'group_description': group.description_safe,
3048 'group_description': group.description_safe,
3048 'parent_group': group.parent_group.group_name if group.parent_group else None,
3049 'parent_group': group.parent_group.group_name if group.parent_group else None,
3049 'repositories': [x.repo_name for x in group.repositories],
3050 'repositories': [x.repo_name for x in group.repositories],
3050 'owner': group.user.username,
3051 'owner': group.user.username,
3051 }
3052 }
3052 return data
3053 return data
3053
3054
3054 def get_dict(self):
3055 def get_dict(self):
3055 # Since we transformed `group_name` to a hybrid property, we need to
3056 # Since we transformed `group_name` to a hybrid property, we need to
3056 # keep compatibility with the code which uses `group_name` field.
3057 # keep compatibility with the code which uses `group_name` field.
3057 result = super(RepoGroup, self).get_dict()
3058 result = super(RepoGroup, self).get_dict()
3058 result['group_name'] = result.pop('_group_name', None)
3059 result['group_name'] = result.pop('_group_name', None)
3059 return result
3060 return result
3060
3061
3061
3062
class Permission(Base, BaseModel):
    """Catalogue of all permission names known to the system.

    ``PERMS`` is the authoritative list seeded into this table;
    ``PERM_WEIGHTS`` orders permissions by strength for comparisons.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need same value as before so forced pushed
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3181
3182
3182 def __unicode__(self):
3183 def __unicode__(self):
3183 return u"<%s('%s:%s')>" % (
3184 return u"<%s('%s:%s')>" % (
3184 self.__class__.__name__, self.permission_id, self.permission_name
3185 self.__class__.__name__, self.permission_id, self.permission_name
3185 )
3186 )
3186
3187
3187 @classmethod
3188 @classmethod
3188 def get_by_key(cls, key):
3189 def get_by_key(cls, key):
3189 return cls.query().filter(cls.permission_name == key).scalar()
3190 return cls.query().filter(cls.permission_name == key).scalar()
3190
3191
3191 @classmethod
3192 @classmethod
3192 def get_default_repo_perms(cls, user_id, repo_id=None):
3193 def get_default_repo_perms(cls, user_id, repo_id=None):
3193 q = Session().query(UserRepoToPerm, Repository, Permission)\
3194 q = Session().query(UserRepoToPerm, Repository, Permission)\
3194 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3195 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3195 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3196 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3196 .filter(UserRepoToPerm.user_id == user_id)
3197 .filter(UserRepoToPerm.user_id == user_id)
3197 if repo_id:
3198 if repo_id:
3198 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3199 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3199 return q.all()
3200 return q.all()
3200
3201
3201 @classmethod
3202 @classmethod
3202 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3203 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3203 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3204 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3204 .join(
3205 .join(
3205 Permission,
3206 Permission,
3206 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3207 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3207 .join(
3208 .join(
3208 UserRepoToPerm,
3209 UserRepoToPerm,
3209 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3210 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3210 .filter(UserRepoToPerm.user_id == user_id)
3211 .filter(UserRepoToPerm.user_id == user_id)
3211
3212
3212 if repo_id:
3213 if repo_id:
3213 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3214 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3214 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3215 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3215
3216
3216 @classmethod
3217 @classmethod
3217 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3218 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3218 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3219 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3219 .join(
3220 .join(
3220 Permission,
3221 Permission,
3221 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3222 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3222 .join(
3223 .join(
3223 Repository,
3224 Repository,
3224 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3225 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3225 .join(
3226 .join(
3226 UserGroup,
3227 UserGroup,
3227 UserGroupRepoToPerm.users_group_id ==
3228 UserGroupRepoToPerm.users_group_id ==
3228 UserGroup.users_group_id)\
3229 UserGroup.users_group_id)\
3229 .join(
3230 .join(
3230 UserGroupMember,
3231 UserGroupMember,
3231 UserGroupRepoToPerm.users_group_id ==
3232 UserGroupRepoToPerm.users_group_id ==
3232 UserGroupMember.users_group_id)\
3233 UserGroupMember.users_group_id)\
3233 .filter(
3234 .filter(
3234 UserGroupMember.user_id == user_id,
3235 UserGroupMember.user_id == user_id,
3235 UserGroup.users_group_active == true())
3236 UserGroup.users_group_active == true())
3236 if repo_id:
3237 if repo_id:
3237 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3238 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3238 return q.all()
3239 return q.all()
3239
3240
3240 @classmethod
3241 @classmethod
3241 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3242 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3242 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3243 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3243 .join(
3244 .join(
3244 Permission,
3245 Permission,
3245 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3246 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3246 .join(
3247 .join(
3247 UserGroupRepoToPerm,
3248 UserGroupRepoToPerm,
3248 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3249 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3249 .join(
3250 .join(
3250 UserGroup,
3251 UserGroup,
3251 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3252 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3252 .join(
3253 .join(
3253 UserGroupMember,
3254 UserGroupMember,
3254 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3255 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3255 .filter(
3256 .filter(
3256 UserGroupMember.user_id == user_id,
3257 UserGroupMember.user_id == user_id,
3257 UserGroup.users_group_active == true())
3258 UserGroup.users_group_active == true())
3258
3259
3259 if repo_id:
3260 if repo_id:
3260 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3261 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3261 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3262 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3262
3263
3263 @classmethod
3264 @classmethod
3264 def get_default_group_perms(cls, user_id, repo_group_id=None):
3265 def get_default_group_perms(cls, user_id, repo_group_id=None):
3265 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3266 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3266 .join(
3267 .join(
3267 Permission,
3268 Permission,
3268 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3269 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3269 .join(
3270 .join(
3270 RepoGroup,
3271 RepoGroup,
3271 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3272 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3272 .filter(UserRepoGroupToPerm.user_id == user_id)
3273 .filter(UserRepoGroupToPerm.user_id == user_id)
3273 if repo_group_id:
3274 if repo_group_id:
3274 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3275 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3275 return q.all()
3276 return q.all()
3276
3277
3277 @classmethod
3278 @classmethod
3278 def get_default_group_perms_from_user_group(
3279 def get_default_group_perms_from_user_group(
3279 cls, user_id, repo_group_id=None):
3280 cls, user_id, repo_group_id=None):
3280 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3281 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3281 .join(
3282 .join(
3282 Permission,
3283 Permission,
3283 UserGroupRepoGroupToPerm.permission_id ==
3284 UserGroupRepoGroupToPerm.permission_id ==
3284 Permission.permission_id)\
3285 Permission.permission_id)\
3285 .join(
3286 .join(
3286 RepoGroup,
3287 RepoGroup,
3287 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3288 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3288 .join(
3289 .join(
3289 UserGroup,
3290 UserGroup,
3290 UserGroupRepoGroupToPerm.users_group_id ==
3291 UserGroupRepoGroupToPerm.users_group_id ==
3291 UserGroup.users_group_id)\
3292 UserGroup.users_group_id)\
3292 .join(
3293 .join(
3293 UserGroupMember,
3294 UserGroupMember,
3294 UserGroupRepoGroupToPerm.users_group_id ==
3295 UserGroupRepoGroupToPerm.users_group_id ==
3295 UserGroupMember.users_group_id)\
3296 UserGroupMember.users_group_id)\
3296 .filter(
3297 .filter(
3297 UserGroupMember.user_id == user_id,
3298 UserGroupMember.user_id == user_id,
3298 UserGroup.users_group_active == true())
3299 UserGroup.users_group_active == true())
3299 if repo_group_id:
3300 if repo_group_id:
3300 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3301 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3301 return q.all()
3302 return q.all()
3302
3303
3303 @classmethod
3304 @classmethod
3304 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3305 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3305 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3306 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3306 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3307 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3307 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3308 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3308 .filter(UserUserGroupToPerm.user_id == user_id)
3309 .filter(UserUserGroupToPerm.user_id == user_id)
3309 if user_group_id:
3310 if user_group_id:
3310 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3311 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3311 return q.all()
3312 return q.all()
3312
3313
3313 @classmethod
3314 @classmethod
3314 def get_default_user_group_perms_from_user_group(
3315 def get_default_user_group_perms_from_user_group(
3315 cls, user_id, user_group_id=None):
3316 cls, user_id, user_group_id=None):
3316 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3317 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3317 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3318 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3318 .join(
3319 .join(
3319 Permission,
3320 Permission,
3320 UserGroupUserGroupToPerm.permission_id ==
3321 UserGroupUserGroupToPerm.permission_id ==
3321 Permission.permission_id)\
3322 Permission.permission_id)\
3322 .join(
3323 .join(
3323 TargetUserGroup,
3324 TargetUserGroup,
3324 UserGroupUserGroupToPerm.target_user_group_id ==
3325 UserGroupUserGroupToPerm.target_user_group_id ==
3325 TargetUserGroup.users_group_id)\
3326 TargetUserGroup.users_group_id)\
3326 .join(
3327 .join(
3327 UserGroup,
3328 UserGroup,
3328 UserGroupUserGroupToPerm.user_group_id ==
3329 UserGroupUserGroupToPerm.user_group_id ==
3329 UserGroup.users_group_id)\
3330 UserGroup.users_group_id)\
3330 .join(
3331 .join(
3331 UserGroupMember,
3332 UserGroupMember,
3332 UserGroupUserGroupToPerm.user_group_id ==
3333 UserGroupUserGroupToPerm.user_group_id ==
3333 UserGroupMember.users_group_id)\
3334 UserGroupMember.users_group_id)\
3334 .filter(
3335 .filter(
3335 UserGroupMember.user_id == user_id,
3336 UserGroupMember.user_id == user_id,
3336 UserGroup.users_group_active == true())
3337 UserGroup.users_group_active == true())
3337 if user_group_id:
3338 if user_group_id:
3338 q = q.filter(
3339 q = q.filter(
3339 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3340 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3340
3341
3341 return q.all()
3342 return q.all()
3342
3343
3343
3344
class UserRepoToPerm(Base, BaseModel):
    """
    Association model: a single user's permission on a single repository.
    One row per (user, repository, permission) triple.
    """
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    # per-branch permission rules attached to this repo permission entry;
    # deleted together with it (delete-orphan cascade)
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        """Create, add to the session and return a new permission entry."""
        entry = cls()
        entry.user = user
        entry.repository = repository
        entry.permission = permission
        Session().add(entry)
        return entry

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
3373
3374
3374
3375
class UserUserGroupToPerm(Base, BaseModel):
    """
    Association model: a single user's permission on a single user group.
    One row per (user, user group, permission) triple.
    """
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Create, add to the session and return a new permission entry."""
        entry = cls()
        entry.user = user
        entry.user_group = user_group
        entry.permission = permission
        Session().add(entry)
        return entry

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
3402
3403
3403
3404
class UserToPerm(Base, BaseModel):
    """
    Association model: a global (non-scoped) permission granted to a user.
    One row per (user, permission) pair.
    """
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly loaded: the permission is nearly always read with the row
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
3420
3421
3421
3422
class UserGroupRepoToPerm(Base, BaseModel):
    """
    Association model: a user group's permission on a single repository.
    One row per (user group, repository, permission) triple.
    """
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    # per-branch rules attached to this group permission; cascade-deleted
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Create, add to the session and return a new permission entry."""
        entry = cls()
        entry.users_group = users_group
        entry.repository = repository
        entry.permission = permission
        Session().add(entry)
        return entry

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3450
3451
3451
3452
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Association model: one user group's permission on another user group.
    The CheckConstraint forbids a group from holding a permission on itself.
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # both relationships point at UserGroup, so each needs an explicit
    # primaryjoin to pick the right FK column
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Create, add to the session and return a new permission entry."""
        entry = cls()
        entry.target_user_group = target_user_group
        entry.user_group = user_group
        entry.permission = permission
        Session().add(entry)
        return entry

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3480
3481
3481
3482
class UserGroupToPerm(Base, BaseModel):
    """
    Association model: a global (non-scoped) permission granted to a
    user group. One row per (user group, permission) pair.
    """
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3495
3496
3496
3497
class UserRepoGroupToPerm(Base, BaseModel):
    """
    Association model: a single user's permission on a repository group.
    One row per (user, repo group, permission) triple.
    """
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Create, add to the session and return a new permission entry."""
        entry = cls()
        entry.user = user
        entry.group = repository_group
        entry.permission = permission
        Session().add(entry)
        return entry
3521
3522
3522
3523
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Association model: a user group's permission on a repository group.
    Uniqueness is on (user group, repo group) — one permission per pair.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Create, add to the session and return a new permission entry."""
        entry = cls()
        entry.users_group = user_group
        entry.group = repository_group
        entry.permission = permission
        Session().add(entry)
        return entry

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3550
3551
3551
3552
class Statistics(Base, BaseModel):
    """
    Per-repository commit statistics, stored as opaque JSON blobs
    (commit activity, combined activity, language breakdown) computed up to
    a given revision.
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # one statistics row per repository (unique FK)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # last revision the stats were computed for
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)
3566
3567
3567
3568
class UserFollowing(Base, BaseModel):
    """
    A user following either a repository or another user. Exactly one of
    ``follows_repo_id`` / ``follows_user_id`` is expected to be set
    (both columns are nullable; uniqueness is enforced per pair).
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of all followings that target repository *repo_id*."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3590
3591
3591
3592
class CacheKey(Base, BaseModel):
    """
    Cache-invalidation bookkeeping. Each row tracks one cache key, the
    namespace (``cache_args``) it belongs to, whether it is active, and a
    state uid that is regenerated whenever the cache is invalidated.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        """Split cache_key around cache_args into (prefix, repo_name, suffix)."""
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """
        Return a new state uid: deterministic (uuid5 of *based_on*) when a
        seed is given, otherwise random (uuid4).
        """
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                # keep the rows but deactivate them and rotate the state uid
                qry.update({"cache_active": False,
                            "cache_state_uid": cls.generate_new_state_uid()})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the CacheKey row for *cache_key*, or None when absent."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Return a {cache_key: CacheKey} map of all rows in *namespace*."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
3699
3700
3700
3701
class ChangesetComment(Base, BaseModel):
    """
    A comment attached either to a commit (``revision``) or to a pull
    request / pull request version; may also carry an inline location
    (``f_path`` + ``line_no``) and a TODO/note type.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    # self-referential link between a TODO comment and the comment resolving it
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Return the users who actually commented, filtered either by a
        commit ``revision`` or by a ``pull_request_id``.
        """
        query = Session().query(User).join(ChangesetComment.author)
        if revision:
            query = query.filter(cls.revision == revision)
        elif pull_request_id:
            query = query.filter(cls.pull_request_id == pull_request_id)
        return query.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """1-based index of ``pr_version`` inside ``versions``, or None."""
        version_ids = [entry.pull_request_version_id for entry in versions]
        if pr_version not in version_ids:
            return
        return version_ids.index(pr_version) + 1

    @property
    def outdated(self):
        return self.display_state == self.COMMENT_OUTDATED

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            return self.pull_request_version_id is not None
        return self.pull_request_version_id < version

    @property
    def resolved(self):
        # first resolving comment, if any
        if not self.resolved_by:
            return None
        return self.resolved_by[0]

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments carry both a file path and a line marker
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Serializable representation used by the API layer."""
        return {
            'comment_id': self.comment_id,
            'comment_type': self.comment_type,
            'comment_text': self.text,
            'comment_status': self.status_change,
            'comment_f_path': self.f_path,
            'comment_lineno': self.line_no,
            'comment_author': self.author,
            'comment_created_on': self.created_on,
            'comment_resolved_by': self.resolved
        }

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3825
3826
3826
3827
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) attached to a commit revision,
    optionally versioned and linked to a pull request.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Human-readable label for a raw status ``value``."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Serializable representation used by the API layer."""
        return {
            'status_id': self.changeset_status_id,
            'status': self.status,
        }

    def __json__(self):
        result = dict()
        result.update(self.get_api_data())
        return result
3889
3890
3890
3891
3891 class _SetState(object):
3892 class _SetState(object):
3892 """
3893 """
3893 Context processor allowing changing state for sensitive operation such as
3894 Context processor allowing changing state for sensitive operation such as
3894 pull request update or merge
3895 pull request update or merge
3895 """
3896 """
3896
3897
3897 def __init__(self, pull_request, pr_state, back_state=None):
3898 def __init__(self, pull_request, pr_state, back_state=None):
3898 self._pr = pull_request
3899 self._pr = pull_request
3899 self._org_state = back_state or pull_request.pull_request_state
3900 self._org_state = back_state or pull_request.pull_request_state
3900 self._pr_state = pr_state
3901 self._pr_state = pr_state
3901 self._current_state = None
3902 self._current_state = None
3902
3903
3903 def __enter__(self):
3904 def __enter__(self):
3904 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
3905 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
3905 self._pr, self._pr_state)
3906 self._pr, self._pr_state)
3906 self.set_pr_state(self._pr_state)
3907 self.set_pr_state(self._pr_state)
3907 return self
3908 return self
3908
3909
3909 def __exit__(self, exc_type, exc_val, exc_tb):
3910 def __exit__(self, exc_type, exc_val, exc_tb):
3910 if exc_val is not None:
3911 if exc_val is not None:
3911 log.error(traceback.format_exc(exc_tb))
3912 log.error(traceback.format_exc(exc_tb))
3912 return None
3913 return None
3913
3914
3914 self.set_pr_state(self._org_state)
3915 self.set_pr_state(self._org_state)
3915 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
3916 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
3916 self._pr, self._org_state)
3917 self._pr, self._org_state)
3917
3918
3918 @property
3919 @property
3919 def state(self):
3920 def state(self):
3920 return self._current_state
3921 return self._current_state
3921
3922
3922 def set_pr_state(self, pr_state):
3923 def set_pr_state(self, pr_state):
3923 try:
3924 try:
3924 self._pr.pull_request_state = pr_state
3925 self._pr.pull_request_state = pr_state
3925 Session().add(self._pr)
3926 Session().add(self._pr)
3926 Session().commit()
3927 Session().commit()
3927 self._current_state = pr_state
3928 self._current_state = pr_state
3928 except Exception:
3929 except Exception:
3929 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
3930 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
3930 raise
3931 raise
3931
3932
3932
3933
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    # available states
    STATE_CREATING = u'creating'
    STATE_UPDATING = u'updating'
    STATE_MERGING = u'merging'
    STATE_CREATED = u'created'
3948
3949
3949 title = Column('title', Unicode(255), nullable=True)
3950 title = Column('title', Unicode(255), nullable=True)
3950 description = Column(
3951 description = Column(
3951 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3952 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3952 nullable=True)
3953 nullable=True)
3953 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3954 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3954
3955
3955 # new/open/closed status of pull request (not approve/reject/etc)
3956 # new/open/closed status of pull request (not approve/reject/etc)
3956 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3957 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3957 created_on = Column(
3958 created_on = Column(
3958 'created_on', DateTime(timezone=False), nullable=False,
3959 'created_on', DateTime(timezone=False), nullable=False,
3959 default=datetime.datetime.now)
3960 default=datetime.datetime.now)
3960 updated_on = Column(
3961 updated_on = Column(
3961 'updated_on', DateTime(timezone=False), nullable=False,
3962 'updated_on', DateTime(timezone=False), nullable=False,
3962 default=datetime.datetime.now)
3963 default=datetime.datetime.now)
3963
3964
3964 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3965 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3965
3966
3966 @declared_attr
3967 @declared_attr
3967 def user_id(cls):
3968 def user_id(cls):
3968 return Column(
3969 return Column(
3969 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3970 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3970 unique=None)
3971 unique=None)
3971
3972
3972 # 500 revisions max
3973 # 500 revisions max
3973 _revisions = Column(
3974 _revisions = Column(
3974 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3975 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3975
3976
3976 @declared_attr
3977 @declared_attr
3977 def source_repo_id(cls):
3978 def source_repo_id(cls):
3978 # TODO: dan: rename column to source_repo_id
3979 # TODO: dan: rename column to source_repo_id
3979 return Column(
3980 return Column(
3980 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3981 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3981 nullable=False)
3982 nullable=False)
3982
3983
3983 _source_ref = Column('org_ref', Unicode(255), nullable=False)
3984 _source_ref = Column('org_ref', Unicode(255), nullable=False)
3984
3985
3985 @hybrid_property
3986 @hybrid_property
3986 def source_ref(self):
3987 def source_ref(self):
3987 return self._source_ref
3988 return self._source_ref
3988
3989
3989 @source_ref.setter
3990 @source_ref.setter
3990 def source_ref(self, val):
3991 def source_ref(self, val):
3991 parts = (val or '').split(':')
3992 parts = (val or '').split(':')
3992 if len(parts) != 3:
3993 if len(parts) != 3:
3993 raise ValueError(
3994 raise ValueError(
3994 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3995 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3995 self._source_ref = safe_unicode(val)
3996 self._source_ref = safe_unicode(val)
3996
3997
3997 _target_ref = Column('other_ref', Unicode(255), nullable=False)
3998 _target_ref = Column('other_ref', Unicode(255), nullable=False)
3998
3999
3999 @hybrid_property
4000 @hybrid_property
4000 def target_ref(self):
4001 def target_ref(self):
4001 return self._target_ref
4002 return self._target_ref
4002
4003
4003 @target_ref.setter
4004 @target_ref.setter
4004 def target_ref(self, val):
4005 def target_ref(self, val):
4005 parts = (val or '').split(':')
4006 parts = (val or '').split(':')
4006 if len(parts) != 3:
4007 if len(parts) != 3:
4007 raise ValueError(
4008 raise ValueError(
4008 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4009 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4009 self._target_ref = safe_unicode(val)
4010 self._target_ref = safe_unicode(val)
4010
4011
4011 @declared_attr
4012 @declared_attr
4012 def target_repo_id(cls):
4013 def target_repo_id(cls):
4013 # TODO: dan: rename column to target_repo_id
4014 # TODO: dan: rename column to target_repo_id
4014 return Column(
4015 return Column(
4015 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4016 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4016 nullable=False)
4017 nullable=False)
4017
4018
4018 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4019 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4019
4020
4020 # TODO: dan: rename column to last_merge_source_rev
4021 # TODO: dan: rename column to last_merge_source_rev
4021 _last_merge_source_rev = Column(
4022 _last_merge_source_rev = Column(
4022 'last_merge_org_rev', String(40), nullable=True)
4023 'last_merge_org_rev', String(40), nullable=True)
4023 # TODO: dan: rename column to last_merge_target_rev
4024 # TODO: dan: rename column to last_merge_target_rev
4024 _last_merge_target_rev = Column(
4025 _last_merge_target_rev = Column(
4025 'last_merge_other_rev', String(40), nullable=True)
4026 'last_merge_other_rev', String(40), nullable=True)
4026 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4027 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4028 last_merge_metadata = Column(
4029 'last_merge_metadata', MutationObj.as_mutable(
4030 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4031
4027 merge_rev = Column('merge_rev', String(40), nullable=True)
4032 merge_rev = Column('merge_rev', String(40), nullable=True)
4028
4033
4029 reviewer_data = Column(
4034 reviewer_data = Column(
4030 'reviewer_data_json', MutationObj.as_mutable(
4035 'reviewer_data_json', MutationObj.as_mutable(
4031 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4036 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4032
4037
4033 @property
4038 @property
4034 def reviewer_data_json(self):
4039 def reviewer_data_json(self):
4035 return json.dumps(self.reviewer_data)
4040 return json.dumps(self.reviewer_data)
4036
4041
4037 @property
4042 @property
4038 def work_in_progress(self):
4043 def work_in_progress(self):
4039 """checks if pull request is work in progress by checking the title"""
4044 """checks if pull request is work in progress by checking the title"""
4040 title = self.title.upper()
4045 title = self.title.upper()
4041 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4046 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4042 return True
4047 return True
4043 return False
4048 return False
4044
4049
4045 @hybrid_property
4050 @hybrid_property
4046 def description_safe(self):
4051 def description_safe(self):
4047 from rhodecode.lib import helpers as h
4052 from rhodecode.lib import helpers as h
4048 return h.escape(self.description)
4053 return h.escape(self.description)
4049
4054
4050 @hybrid_property
4055 @hybrid_property
4051 def revisions(self):
4056 def revisions(self):
4052 return self._revisions.split(':') if self._revisions else []
4057 return self._revisions.split(':') if self._revisions else []
4053
4058
4054 @revisions.setter
4059 @revisions.setter
4055 def revisions(self, val):
4060 def revisions(self, val):
4056 self._revisions = u':'.join(val)
4061 self._revisions = u':'.join(val)
4057
4062
4058 @hybrid_property
4063 @hybrid_property
4059 def last_merge_status(self):
4064 def last_merge_status(self):
4060 return safe_int(self._last_merge_status)
4065 return safe_int(self._last_merge_status)
4061
4066
4062 @last_merge_status.setter
4067 @last_merge_status.setter
4063 def last_merge_status(self, val):
4068 def last_merge_status(self, val):
4064 self._last_merge_status = val
4069 self._last_merge_status = val
4065
4070
4066 @declared_attr
4071 @declared_attr
4067 def author(cls):
4072 def author(cls):
4068 return relationship('User', lazy='joined')
4073 return relationship('User', lazy='joined')
4069
4074
4070 @declared_attr
4075 @declared_attr
4071 def source_repo(cls):
4076 def source_repo(cls):
4072 return relationship(
4077 return relationship(
4073 'Repository',
4078 'Repository',
4074 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4079 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4075
4080
4076 @property
4081 @property
4077 def source_ref_parts(self):
4082 def source_ref_parts(self):
4078 return self.unicode_to_reference(self.source_ref)
4083 return self.unicode_to_reference(self.source_ref)
4079
4084
4080 @declared_attr
4085 @declared_attr
4081 def target_repo(cls):
4086 def target_repo(cls):
4082 return relationship(
4087 return relationship(
4083 'Repository',
4088 'Repository',
4084 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4089 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4085
4090
4086 @property
4091 @property
4087 def target_ref_parts(self):
4092 def target_ref_parts(self):
4088 return self.unicode_to_reference(self.target_ref)
4093 return self.unicode_to_reference(self.target_ref)
4089
4094
4090 @property
4095 @property
4091 def shadow_merge_ref(self):
4096 def shadow_merge_ref(self):
4092 return self.unicode_to_reference(self._shadow_merge_ref)
4097 return self.unicode_to_reference(self._shadow_merge_ref)
4093
4098
4094 @shadow_merge_ref.setter
4099 @shadow_merge_ref.setter
4095 def shadow_merge_ref(self, ref):
4100 def shadow_merge_ref(self, ref):
4096 self._shadow_merge_ref = self.reference_to_unicode(ref)
4101 self._shadow_merge_ref = self.reference_to_unicode(ref)
4097
4102
4098 @staticmethod
4103 @staticmethod
4099 def unicode_to_reference(raw):
4104 def unicode_to_reference(raw):
4100 """
4105 """
4101 Convert a unicode (or string) to a reference object.
4106 Convert a unicode (or string) to a reference object.
4102 If unicode evaluates to False it returns None.
4107 If unicode evaluates to False it returns None.
4103 """
4108 """
4104 if raw:
4109 if raw:
4105 refs = raw.split(':')
4110 refs = raw.split(':')
4106 return Reference(*refs)
4111 return Reference(*refs)
4107 else:
4112 else:
4108 return None
4113 return None
4109
4114
4110 @staticmethod
4115 @staticmethod
4111 def reference_to_unicode(ref):
4116 def reference_to_unicode(ref):
4112 """
4117 """
4113 Convert a reference object to unicode.
4118 Convert a reference object to unicode.
4114 If reference is None it returns None.
4119 If reference is None it returns None.
4115 """
4120 """
4116 if ref:
4121 if ref:
4117 return u':'.join(ref)
4122 return u':'.join(ref)
4118 else:
4123 else:
4119 return None
4124 return None
4120
4125
def get_api_data(self, with_merge_state=True):
    """
    Build the JSON-serializable API representation of this pull request.

    :param with_merge_state: when True, ask PullRequestModel for the live
        merge status (can be expensive); otherwise the merge state is
        reported as 'not_available'.
    """
    from rhodecode.model.pull_request import PullRequestModel

    pull_request = self

    if with_merge_state:
        # merge_status() returns (merge_response, status_flag, message)
        merge_response, merge_status, msg = \
            PullRequestModel().merge_status(pull_request)
        merge_state = {
            'status': merge_status,
            'message': safe_unicode(msg),
        }
    else:
        merge_state = {'status': 'not_available',
                       'message': 'not_available'}

    merge_data = {
        'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
        'reference': (
            pull_request.shadow_merge_ref._asdict()
            if pull_request.shadow_merge_ref else None),
    }

    def _ref_data(ref_parts):
        # serialize one reference (name/type/commit id) for the payload
        return {
            'name': ref_parts.name,
            'type': ref_parts.type,
            'commit_id': ref_parts.commit_id,
        }

    reviewers = []
    for obj, reviewer, reasons, mandatory, st in \
            pull_request.reviewers_statuses():
        reviewers.append({
            'user': reviewer.get_api_data(include_secrets=False,
                                          details='basic'),
            'reasons': reasons,
            'review_status': st[0][1].status if st else 'not_reviewed',
        })

    data = {
        'pull_request_id': pull_request.pull_request_id,
        'url': PullRequestModel().get_url(pull_request),
        'title': pull_request.title,
        'description': pull_request.description,
        'status': pull_request.status,
        'state': pull_request.pull_request_state,
        'created_on': pull_request.created_on,
        'updated_on': pull_request.updated_on,
        'commit_ids': pull_request.revisions,
        'review_status': pull_request.calculated_review_status(),
        'mergeable': merge_state,
        'source': {
            'clone_url': pull_request.source_repo.clone_url(),
            'repository': pull_request.source_repo.repo_name,
            'reference': _ref_data(pull_request.source_ref_parts),
        },
        'target': {
            'clone_url': pull_request.target_repo.clone_url(),
            'repository': pull_request.target_repo.repo_name,
            'reference': _ref_data(pull_request.target_ref_parts),
        },
        'merge': merge_data,
        'author': pull_request.author.get_api_data(include_secrets=False,
                                                   details='basic'),
        'reviewers': reviewers,
    }

    return data
4188
4194
def set_state(self, pull_request_state, final_state=None):
    """
    Return a context manager that puts this pull request into
    ``pull_request_state`` for the duration of a ``with`` block and
    restores the previous (or an explicitly given) state on exit::

        # goes from initial state to updating to initial state.
        # initial state can be changed by specifying back_state=
        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
            pull_request.merge()

    :param pull_request_state: state to hold while the block runs
    :param final_state: optional state to restore instead of the original
    """
    return _SetState(self, pull_request_state, back_state=final_state)
4202
4208
4203
4209
class PullRequest(Base, _PullRequestBase):
    """
    Current (head) version of a pull request; historical snapshots live
    in PullRequestVersion.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        # fall back to the object's address for not-yet-persisted rows
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
    comments = relationship('ChangesetComment', cascade="all, delete-orphan")
    versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # `internal` names dispatch to real methods on this wrapper;
                # everything else is served from the attrs snapshot
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            def is_state_changing(self):
                return pull_request_obj.is_state_changing()

            @property
            def pull_request_version_id(self):
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        # snapshot the API payload; merge state is skipped on purpose since
        # this wrapper exists purely for display
        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions

        # shadow/reviewer data always come from the original (head) object
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def is_state_changing(self):
        return self.pull_request_state != PullRequest.STATE_CREATED

    def __json__(self):
        return {
            'revisions': self.revisions,
            'versions': self.versions_count
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        # returns None implicitly when no shadow repo directory exists yet
        workspace_id = self.workspace_id
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)

    @property
    def versions_count(self):
        """
        return number of versions this PR have, e.g a PR that once been
        updated will have 2 versions
        """
        return self.versions.count() + 1
4330
4336
4331
4337
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request taken at update time; most
    state is delegated back to the owning PullRequest.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        # unsaved rows have no id yet; show the memory address instead
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # reviewers are tracked on the head pull request only
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def is_state_changing(self):
        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
4371
4377
4372
4378
class PullRequestReviewers(Base, BaseModel):
    """
    Association of a reviewer (user) with a pull request, including the
    reasons the reviewer was picked and any voting-rule metadata.
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )

    @hybrid_property
    def reasons(self):
        # normalize NULL column values to an empty list for callers
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        if any(not isinstance(x, compat.string_types) for x in val):
            # TypeError is the precise exception for a wrongly-typed
            # argument; it is still caught by existing `except Exception`
            # handlers, so callers remain compatible.
            raise TypeError('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON-encoded list of reasons; exposed via the `reasons` hybrid above
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # raw voting-rule payload attached when the reviewer came from a rule
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
4430
4436
4431
4437
class Notification(Base, BaseModel):
    """
    A notification message fanned out to one or more users via the
    UserNotification association table.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
    TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete-orphan")

    @property
    def recipients(self):
        # users linked to this notification, ordered by ascending user id
        assocs = (
            UserNotification.query()
            .filter(UserNotification.notification == self)
            .order_by(UserNotification.user_id.asc())
            .all())
        return [assoc.user for assoc in assocs]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to every user in `recipients`.
        The creator's own copy is pre-marked as read.
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for recipient in recipients:
            assoc = UserNotification()
            assoc.user_id = recipient.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if recipient.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4491
4497
4492
4498
class UserNotification(Base, BaseModel):
    """
    Association row linking one user to one notification, with a
    per-user read flag.
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        """Flag this user's copy of the notification as read."""
        self.read = True
        Session().add(self)
4512
4518
4513
4519
class Gist(Base, BaseModel):
    """
    A code snippet (gist) whose content lives in its own small VCS
    repository under the gist store location.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        # HTML-escape the description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch a gist by access id or raise a 404 response."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path where all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        # root repo path is cached; gists live in a subdirectory of it
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            # content is intentionally omitted; it is loaded separately
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = {}
        data.update(self.get_api_data())
        return data

    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get an instance of VCS Repository

        :param kwargs:
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
4616
4622
4617
4623
class ExternalIdentity(Base, BaseModel):
    """
    Link between a local user account and an identity on an external
    authentication provider (OAuth token storage included).
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    # composite primary key: (external_id, local_user_id, provider_name)
    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :param local_user_id: optional extra filter; ignored when falsy
        :return: ExternalIdentity or None
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User or None
        """
        # implicit join: filters on this table's columns while selecting User
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: an unexecuted query over ExternalIdentity rows
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Resolve and return the EE auth plugin registered for *plugin_id*."""
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
4683
4689
4684
4690
class Integration(Base, BaseModel):
    """
    A configured integration (e.g. webhook) scoped to a single repository,
    a repository group, or globally when neither FK is set.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    # arbitrary per-integration settings stored as JSON
    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        """Human-readable scope label; precedence: repo > repo group > global."""
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            # global scope, but restricted to top-level repositories
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4726
4732
4727
4733
class RepoReviewRuleUser(Base, BaseModel):
    """Single user attached to a repo review rule, with a mandatory flag."""
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')

    def rule_data(self):
        """Return this entry's rule attributes as a plain dict."""
        return {
            'mandatory': self.mandatory
        }
4744
4750
4745
4751
class RepoReviewRuleUserGroup(Base, BaseModel):
    """User group attached to a repo review rule, with a vote threshold."""
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel meaning "every member of the group must vote"
    VOTE_RULE_ALL = -1

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        """Return this entry's rule attributes as a plain dict."""
        return {
            'mandatory': self.mandatory,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        """Display label for the vote threshold."""
        # NULL/0 is treated the same as the explicit VOTE_RULE_ALL sentinel
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
4773
4779
4774
4780
class RepoReviewRule(Base, BaseModel):
    """
    Per-repository reviewer rule. Matches pull requests by source/target
    branch and changed-file patterns, and resolves the set of reviewers
    (individual users and user-group members) the rule applies to.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    # glob patterns; '*' means "match everything" — see the hybrid properties
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # raises re.error when the glob cannot be compiled into a regex
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        # empty/NULL stored value falls back to match-all
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                # patterns prefixed with 're:' are used verbatim as regexes;
                # anything else is a glob anchored at both ends
                if self.source_branch_pattern.startswith('re:'):
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                # note: file globs are NOT anchored, unlike branch globs
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for filename in files_changed:
                if file_regex.search(filename):
                    # one matching file is enough
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                # NOTE(review): keys are usernames but this tests a User
                # object against them, so it is presumably always True —
                # confirm whether it was meant to be `rule_user.user.username`
                if rule_user.user not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        """Return the user-group rule entries whose group contains *user_id*."""
        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
4936
4942
4937
4943
class ScheduleEntry(Base, BaseModel):
    """
    Persistent definition of a scheduled (periodic) task: the task's dotted
    path, its JSON-encoded args/kwargs, and when/how often it should run.
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # allowed values for the `schedule_type` hybrid property
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    # task_uid is derived from the task definition; kept in sync via the
    # before_insert/before_update listeners below the class
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        """Set the schedule type; raises ValueError for unknown values."""
        if val not in self.schedule_types:
            # fix: the message previously swapped its format arguments and
            # reported the *old* value (`self.schedule_type`) instead of the
            # rejected one, e.g. "Value must be on of `bogus` and got `None`"
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """
        Build a stable sha1 uid for *obj* from its task dotted path, args
        and kwargs. Raw JSON payloads are decoded first; undecodable ones
        degrade to empty args/kwargs.
        """
        args = obj.task_args
        kwargs = obj.task_kwargs
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): sorted() over the dotted-path string sorts its
        # characters; kept as-is because stored uids depend on it — confirm
        # whether a list of paths was originally intended here
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        """Return the entry with the given unique name, or None."""
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        """Return the entry with the given primary key, or None."""
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        """Decode the stored schedule definition into a scheduler object."""
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        """Task positional args as a list; empty list on decode problems."""
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        """Task keyword args as a dict; empty dict on decode problems."""
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val):
        """Serialize a (possibly mutation-tracked) value back to raw JSON."""
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            val = json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    @property
    def args_raw(self):
        return self._as_raw(self.task_args)

    @property
    def kwargs_raw(self):
        return self._as_raw(self.task_kwargs)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
5056
5062
5057
5063
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep the derived task_uid in sync with the task definition on UPDATE
    target.task_uid = ScheduleEntry.get_uid(target)
5061
5067
5062
5068
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute the derived task_uid when the row is first inserted
    target.task_uid = ScheduleEntry.get_uid(target)
5066
5072
5067
5073
class _BaseBranchPerms(BaseModel):
    """
    Shared mixin for branch-permission models.

    Stores a glob branch pattern plus a hash of it (the hash is what the
    unique constraints in the concrete tables use), and implements glob
    matching of branch names against the stored pattern.
    """

    @classmethod
    def compute_hash(cls, value):
        # Hash used as the indexable/unique stand-in for the (long text) pattern.
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # Fall back to match-everything glob when no pattern is stored.
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # Raises re.error if the glob translates to an invalid regex;
        # the compiled result is intentionally discarded.
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # set the Hash when setting the branch pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if the given branch name matches this entry's glob pattern.

        An empty/None branch matches (returns True).

        :param branch: branch name for the commit
        """

        branch = branch or ''

        branch_matches = True
        if branch:
            branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
            branch_matches = bool(branch_regex.search(branch))

        return branch_matches
5106
5112
5107
5113
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Per-user branch permission rule for a repository: ties a glob branch
    pattern to a permission, attached to a user's repo permission entry.
    """
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # rule is attached to a concrete user<->repo permission entry
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm')

    # lower rule_order values are evaluated first (ordering of rules)
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        return u'<UserBranchPermission(%s => %r)>' % (
            self.user_repo_to_perm, self.branch_pattern)
5132
5138
5133
5139
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Per-user-group branch permission rule for a repository: ties a glob
    branch pattern to a permission, attached to a user group's repo
    permission entry. Mirrors UserToRepoBranchPermission for groups.
    """
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # rule is attached to a concrete user-group<->repo permission entry
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm')

    # lower rule_order values are evaluated first (ordering of rules)
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        # BUGFIX: label previously said `UserBranchPermission` (copy-paste from
        # the user-level class), which misidentified group rules in logs.
        return u'<UserGroupBranchPermission(%s => %r)>' % (
            self.user_group_repo_to_perm, self.branch_pattern)
5158
5164
5159
5165
class UserBookmark(Base, BaseModel):
    """
    A user's navigation bookmark pointing at a repository, a repository
    group, or a plain redirect URL, kept at a unique per-user position.
    """
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        # a user may bookmark a given repo/repo-group only once, and each
        # position slot is unique per user
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # optional targets; both nullable — a bookmark may be URL-only
    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        """Return the single bookmark at *position* for *user_id*, or None."""
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id, cache=True):
        """
        Return all bookmarks of *user_id* ordered by position, with the
        repo/repo-group targets eagerly joined; optionally served from the
        short SQL cache region.
        """
        bookmarks = cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .options(joinedload(UserBookmark.repository)) \
            .options(joinedload(UserBookmark.repository_group)) \
            .order_by(UserBookmark.position.asc())

        if cache:
            bookmarks = bookmarks.options(
                FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
            )

        return bookmarks.all()

    def __unicode__(self):
        return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5207
5213
5208
5214
class FileStore(Base, BaseModel):
    """
    A stored file/artifact: original and display names, sha256 hash, size,
    access bookkeeping, and optional ACL scoping to a user, user group,
    repository, or repository group.
    """
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', BigInteger(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    # hidden defines an attachment that should be hidden from showing in artifact listing
    hidden = Column('hidden', Boolean(), nullable=False, default=False)

    # the user who uploaded the file
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')

    file_metadata = relationship('FileStoreMetadata', lazy='joined')

    # scope limited to user, which requester have access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')

    # scope limited to user group, which requester have access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to repo, which requester have access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to repo group, which requester have access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @classmethod
    def get_by_store_uid(cls, file_store_uid):
        """Return the FileStore row with the given file_uid, or None."""
        return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()

    @classmethod
    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
               file_description='', enabled=True, hidden=False, check_acl=True,
               user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
        """
        Build (but do not persist) a new FileStore entry from the given
        attributes; the caller is responsible for adding/committing it.
        """
        store_entry = FileStore()
        store_entry.file_uid = file_uid
        store_entry.file_display_name = file_display_name
        store_entry.file_org_name = filename
        store_entry.file_size = file_size
        store_entry.file_hash = file_hash
        store_entry.file_description = file_description

        store_entry.check_acl = check_acl
        store_entry.enabled = enabled
        store_entry.hidden = hidden

        store_entry.user_id = user_id
        store_entry.scope_user_id = scope_user_id
        store_entry.scope_repo_id = scope_repo_id
        store_entry.scope_repo_group_id = scope_repo_group_id

        return store_entry

    @classmethod
    def store_metadata(cls, file_store_id, args, commit=True):
        """
        Attach metadata entries to the FileStore row with *file_store_id*.

        :param args: iterable of (section, key, value, value_type) tuples
        :raises ArtifactMetadataDuplicate: if a section/key pair already
            exists for this file (checked up-front and again on commit via
            the DB unique constraint)
        :raises ArtifactMetadataBadValueType: for an unknown value_type
        """
        file_store = FileStore.get(file_store_id)
        if file_store is None:
            # silently no-op for unknown ids
            return

        for section, key, value, value_type in args:
            has_key = FileStoreMetadata().query() \
                .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
                .filter(FileStoreMetadata.file_store_meta_section == section) \
                .filter(FileStoreMetadata.file_store_meta_key == key) \
                .scalar()
            if has_key:
                msg = 'key `{}` already defined under section `{}` for this file.'\
                    .format(key, section)
                raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)

            # NOTE(marcink): raises ArtifactMetadataBadValueType
            FileStoreMetadata.valid_value_type(value_type)

            meta_entry = FileStoreMetadata()
            meta_entry.file_store = file_store
            meta_entry.file_store_meta_section = section
            meta_entry.file_store_meta_key = key
            meta_entry.file_store_meta_value_type = value_type
            meta_entry.file_store_meta_value = value

            Session().add(meta_entry)

        try:
            if commit:
                Session().commit()
        except IntegrityError:
            # concurrent insert raced past the up-front check; surface as
            # the same domain error after rolling back
            Session().rollback()
            raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')

    @classmethod
    def bump_access_counter(cls, file_uid, commit=True):
        """Increment accessed_count and stamp accessed_on for *file_uid*."""
        FileStore().query()\
            .filter(FileStore.file_uid == file_uid)\
            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
                     FileStore.accessed_on: datetime.datetime.now()})
        if commit:
            Session().commit()

    def __json__(self):
        """API/JSON representation of this artifact."""
        data = {
            'filename': self.file_display_name,
            'filename_org': self.file_org_name,
            'file_uid': self.file_uid,
            'description': self.file_description,
            'hidden': self.hidden,
            'size': self.file_size,
            'created_on': self.created_on,
            'uploaded_by': self.upload_user.get_api_data(details='basic'),
            'downloaded_times': self.accessed_count,
            'sha256': self.file_hash,
            'metadata': self.file_metadata,
        }

        return data

    def __repr__(self):
        return '<FileStore({})>'.format(self.file_store_id)
5358
5364
5359
5365
class FileStoreMetadata(Base, BaseModel):
    """
    Typed key/value metadata attached to a FileStore artifact, grouped into
    sections. Values are stored as text and coerced on read according to
    file_store_meta_value_type; a `.encrypted` suffix on the type triggers
    transparent encryption/decryption of the stored value.
    """
    __tablename__ = 'file_store_metadata'
    __table_args__ = (
        # uniqueness is enforced on the *hashes* of section/key since the
        # text columns themselves are too long to index portably
        UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
        Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
        Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
        base_table_args
    )
    # allowed value types mapped to their read-time converter callables
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }

    file_store_meta_id = Column(
        "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _file_store_meta_section = Column(
        "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_section_hash = Column(
        "file_store_meta_section_hash", String(255),
        nullable=True, unique=None, default=None)
    _file_store_meta_key = Column(
        "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_key_hash = Column(
        "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
    _file_store_meta_value = Column(
        "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_value_type = Column(
        "file_store_meta_value_type", String(255), nullable=True, unique=None,
        default='unicode')

    file_store_id = Column(
        'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
        nullable=True, unique=None, default=None)

    file_store = relationship('FileStore', lazy='joined')

    @classmethod
    def valid_value_type(cls, value):
        """Validate a value_type string (base type before any '.' suffix)."""
        if value.split('.')[0] not in cls.SETTINGS_TYPES:
            raise ArtifactMetadataBadValueType(
                'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))

    @hybrid_property
    def file_store_meta_section(self):
        return self._file_store_meta_section

    @file_store_meta_section.setter
    def file_store_meta_section(self, value):
        # keep the indexed hash column in sync with the text column
        self._file_store_meta_section = value
        self._file_store_meta_section_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_key(self):
        return self._file_store_meta_key

    @file_store_meta_key.setter
    def file_store_meta_key(self, value):
        # keep the indexed hash column in sync with the text column
        self._file_store_meta_key = value
        self._file_store_meta_key_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_value(self):
        """Stored value, decrypted (if needed) and coerced to its declared type."""
        val = self._file_store_meta_value

        if self._file_store_meta_value_type:
            # e.g unicode.encrypted == unicode
            _type = self._file_store_meta_value_type.split('.')[0]
            # decode the encrypted value if it's encrypted field type
            if '.encrypted' in self._file_store_meta_value_type:
                cipher = EncryptedTextValue()
                val = safe_unicode(cipher.process_result_value(val, None))
            # do final type conversion
            converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
            val = converter(val)

        return val

    @file_store_meta_value.setter
    def file_store_meta_value(self, val):
        # NOTE: file_store_meta_value_type must be set before assigning the
        # value, since the encryption decision is based on it
        val = safe_unicode(val)
        # encode the encrypted value
        if '.encrypted' in self.file_store_meta_value_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._file_store_meta_value = val

    @hybrid_property
    def file_store_meta_value_type(self):
        return self._file_store_meta_value_type

    @file_store_meta_value_type.setter
    def file_store_meta_value_type(self, val):
        # e.g unicode.encrypted
        self.valid_value_type(val)
        self._file_store_meta_value_type = val

    def __json__(self):
        """API/JSON representation of one metadata entry."""
        data = {
            'artifact': self.file_store.file_uid,
            'section': self.file_store_meta_section,
            'key': self.file_store_meta_key,
            'value': self.file_store_meta_value,
        }

        return data

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section,
                                    self.file_store_meta_key, self.file_store_meta_value)
5476
5482
5477
5483
class DbMigrateVersion(Base, BaseModel):
    """
    Tracks the database schema migration version (sqlalchemy-migrate
    bookkeeping table).
    """
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        # assumes exactly one version row exists — TODO confirm; .first()
        # returning None would raise AttributeError here
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()
5496
5502
5497
5503
class DbSession(Base, BaseModel):
    """
    Server-side web session storage backed by the database (pickled session
    data keyed by namespace).
    """
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return '<DB:DbSession({})>'.format(self.id)

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)
@@ -1,1880 +1,1893 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2019 RhodeCode GmbH
3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30
30
31 import datetime
31 import datetime
32 import urllib
32 import urllib
33 import collections
33 import collections
34
34
35 from pyramid import compat
35 from pyramid import compat
36 from pyramid.threadlocal import get_current_request
36 from pyramid.threadlocal import get_current_request
37
37
38 from rhodecode import events
38 from rhodecode import events
39 from rhodecode.translation import lazy_ugettext
39 from rhodecode.translation import lazy_ugettext
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 from rhodecode.lib import audit_logger
41 from rhodecode.lib import audit_logger
42 from rhodecode.lib.compat import OrderedDict
42 from rhodecode.lib.compat import OrderedDict
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 from rhodecode.lib.markup_renderer import (
44 from rhodecode.lib.markup_renderer import (
45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
46 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
47 from rhodecode.lib.vcs.backends.base import (
47 from rhodecode.lib.vcs.backends.base import (
48 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
48 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
49 from rhodecode.lib.vcs.conf import settings as vcs_settings
49 from rhodecode.lib.vcs.conf import settings as vcs_settings
50 from rhodecode.lib.vcs.exceptions import (
50 from rhodecode.lib.vcs.exceptions import (
51 CommitDoesNotExistError, EmptyRepositoryError)
51 CommitDoesNotExistError, EmptyRepositoryError)
52 from rhodecode.model import BaseModel
52 from rhodecode.model import BaseModel
53 from rhodecode.model.changeset_status import ChangesetStatusModel
53 from rhodecode.model.changeset_status import ChangesetStatusModel
54 from rhodecode.model.comment import CommentsModel
54 from rhodecode.model.comment import CommentsModel
55 from rhodecode.model.db import (
55 from rhodecode.model.db import (
56 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
56 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
57 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
57 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
58 from rhodecode.model.meta import Session
58 from rhodecode.model.meta import Session
59 from rhodecode.model.notification import NotificationModel, \
59 from rhodecode.model.notification import NotificationModel, \
60 EmailNotificationModel
60 EmailNotificationModel
61 from rhodecode.model.scm import ScmModel
61 from rhodecode.model.scm import ScmModel
62 from rhodecode.model.settings import VcsSettingsModel
62 from rhodecode.model.settings import VcsSettingsModel
63
63
64
64
65 log = logging.getLogger(__name__)
65 log = logging.getLogger(__name__)
66
66
67
67
68 # Data structure to hold the response data when updating commits during a pull
68 # Data structure to hold the response data when updating commits during a pull
69 # request update.
69 # request update.
70 class UpdateResponse(object):
70 class UpdateResponse(object):
71
71
72 def __init__(self, executed, reason, new, old, common_ancestor_id,
72 def __init__(self, executed, reason, new, old, common_ancestor_id,
73 commit_changes, source_changed, target_changed):
73 commit_changes, source_changed, target_changed):
74
74
75 self.executed = executed
75 self.executed = executed
76 self.reason = reason
76 self.reason = reason
77 self.new = new
77 self.new = new
78 self.old = old
78 self.old = old
79 self.common_ancestor_id = common_ancestor_id
79 self.common_ancestor_id = common_ancestor_id
80 self.changes = commit_changes
80 self.changes = commit_changes
81 self.source_changed = source_changed
81 self.source_changed = source_changed
82 self.target_changed = target_changed
82 self.target_changed = target_changed
83
83
84
84
85 class PullRequestModel(BaseModel):
85 class PullRequestModel(BaseModel):
86
86
87 cls = PullRequest
87 cls = PullRequest
88
88
89 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
89 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
90
90
91 UPDATE_STATUS_MESSAGES = {
91 UPDATE_STATUS_MESSAGES = {
92 UpdateFailureReason.NONE: lazy_ugettext(
92 UpdateFailureReason.NONE: lazy_ugettext(
93 'Pull request update successful.'),
93 'Pull request update successful.'),
94 UpdateFailureReason.UNKNOWN: lazy_ugettext(
94 UpdateFailureReason.UNKNOWN: lazy_ugettext(
95 'Pull request update failed because of an unknown error.'),
95 'Pull request update failed because of an unknown error.'),
96 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
96 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
97 'No update needed because the source and target have not changed.'),
97 'No update needed because the source and target have not changed.'),
98 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
98 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
99 'Pull request cannot be updated because the reference type is '
99 'Pull request cannot be updated because the reference type is '
100 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
100 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
101 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
101 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
102 'This pull request cannot be updated because the target '
102 'This pull request cannot be updated because the target '
103 'reference is missing.'),
103 'reference is missing.'),
104 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
104 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
105 'This pull request cannot be updated because the source '
105 'This pull request cannot be updated because the source '
106 'reference is missing.'),
106 'reference is missing.'),
107 }
107 }
108 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
108 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
109 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
109 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
110
110
111 def __get_pull_request(self, pull_request):
111 def __get_pull_request(self, pull_request):
112 return self._get_instance((
112 return self._get_instance((
113 PullRequest, PullRequestVersion), pull_request)
113 PullRequest, PullRequestVersion), pull_request)
114
114
115 def _check_perms(self, perms, pull_request, user, api=False):
115 def _check_perms(self, perms, pull_request, user, api=False):
116 if not api:
116 if not api:
117 return h.HasRepoPermissionAny(*perms)(
117 return h.HasRepoPermissionAny(*perms)(
118 user=user, repo_name=pull_request.target_repo.repo_name)
118 user=user, repo_name=pull_request.target_repo.repo_name)
119 else:
119 else:
120 return h.HasRepoPermissionAnyApi(*perms)(
120 return h.HasRepoPermissionAnyApi(*perms)(
121 user=user, repo_name=pull_request.target_repo.repo_name)
121 user=user, repo_name=pull_request.target_repo.repo_name)
122
122
123 def check_user_read(self, pull_request, user, api=False):
123 def check_user_read(self, pull_request, user, api=False):
124 _perms = ('repository.admin', 'repository.write', 'repository.read',)
124 _perms = ('repository.admin', 'repository.write', 'repository.read',)
125 return self._check_perms(_perms, pull_request, user, api)
125 return self._check_perms(_perms, pull_request, user, api)
126
126
127 def check_user_merge(self, pull_request, user, api=False):
127 def check_user_merge(self, pull_request, user, api=False):
128 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
128 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
129 return self._check_perms(_perms, pull_request, user, api)
129 return self._check_perms(_perms, pull_request, user, api)
130
130
131 def check_user_update(self, pull_request, user, api=False):
131 def check_user_update(self, pull_request, user, api=False):
132 owner = user.user_id == pull_request.user_id
132 owner = user.user_id == pull_request.user_id
133 return self.check_user_merge(pull_request, user, api) or owner
133 return self.check_user_merge(pull_request, user, api) or owner
134
134
135 def check_user_delete(self, pull_request, user):
135 def check_user_delete(self, pull_request, user):
136 owner = user.user_id == pull_request.user_id
136 owner = user.user_id == pull_request.user_id
137 _perms = ('repository.admin',)
137 _perms = ('repository.admin',)
138 return self._check_perms(_perms, pull_request, user) or owner
138 return self._check_perms(_perms, pull_request, user) or owner
139
139
140 def check_user_change_status(self, pull_request, user, api=False):
140 def check_user_change_status(self, pull_request, user, api=False):
141 reviewer = user.user_id in [x.user_id for x in
141 reviewer = user.user_id in [x.user_id for x in
142 pull_request.reviewers]
142 pull_request.reviewers]
143 return self.check_user_update(pull_request, user, api) or reviewer
143 return self.check_user_update(pull_request, user, api) or reviewer
144
144
145 def check_user_comment(self, pull_request, user):
145 def check_user_comment(self, pull_request, user):
146 owner = user.user_id == pull_request.user_id
146 owner = user.user_id == pull_request.user_id
147 return self.check_user_read(pull_request, user) or owner
147 return self.check_user_read(pull_request, user) or owner
148
148
149 def get(self, pull_request):
149 def get(self, pull_request):
150 return self.__get_pull_request(pull_request)
150 return self.__get_pull_request(pull_request)
151
151
152 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
152 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
153 statuses=None, opened_by=None, order_by=None,
153 statuses=None, opened_by=None, order_by=None,
154 order_dir='desc', only_created=False):
154 order_dir='desc', only_created=False):
155 repo = None
155 repo = None
156 if repo_name:
156 if repo_name:
157 repo = self._get_repo(repo_name)
157 repo = self._get_repo(repo_name)
158
158
159 q = PullRequest.query()
159 q = PullRequest.query()
160
160
161 if search_q:
161 if search_q:
162 like_expression = u'%{}%'.format(safe_unicode(search_q))
162 like_expression = u'%{}%'.format(safe_unicode(search_q))
163 q = q.filter(or_(
163 q = q.filter(or_(
164 cast(PullRequest.pull_request_id, String).ilike(like_expression),
164 cast(PullRequest.pull_request_id, String).ilike(like_expression),
165 PullRequest.title.ilike(like_expression),
165 PullRequest.title.ilike(like_expression),
166 PullRequest.description.ilike(like_expression),
166 PullRequest.description.ilike(like_expression),
167 ))
167 ))
168
168
169 # source or target
169 # source or target
170 if repo and source:
170 if repo and source:
171 q = q.filter(PullRequest.source_repo == repo)
171 q = q.filter(PullRequest.source_repo == repo)
172 elif repo:
172 elif repo:
173 q = q.filter(PullRequest.target_repo == repo)
173 q = q.filter(PullRequest.target_repo == repo)
174
174
175 # closed,opened
175 # closed,opened
176 if statuses:
176 if statuses:
177 q = q.filter(PullRequest.status.in_(statuses))
177 q = q.filter(PullRequest.status.in_(statuses))
178
178
179 # opened by filter
179 # opened by filter
180 if opened_by:
180 if opened_by:
181 q = q.filter(PullRequest.user_id.in_(opened_by))
181 q = q.filter(PullRequest.user_id.in_(opened_by))
182
182
183 # only get those that are in "created" state
183 # only get those that are in "created" state
184 if only_created:
184 if only_created:
185 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
185 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
186
186
187 if order_by:
187 if order_by:
188 order_map = {
188 order_map = {
189 'name_raw': PullRequest.pull_request_id,
189 'name_raw': PullRequest.pull_request_id,
190 'id': PullRequest.pull_request_id,
190 'id': PullRequest.pull_request_id,
191 'title': PullRequest.title,
191 'title': PullRequest.title,
192 'updated_on_raw': PullRequest.updated_on,
192 'updated_on_raw': PullRequest.updated_on,
193 'target_repo': PullRequest.target_repo_id
193 'target_repo': PullRequest.target_repo_id
194 }
194 }
195 if order_dir == 'asc':
195 if order_dir == 'asc':
196 q = q.order_by(order_map[order_by].asc())
196 q = q.order_by(order_map[order_by].asc())
197 else:
197 else:
198 q = q.order_by(order_map[order_by].desc())
198 q = q.order_by(order_map[order_by].desc())
199
199
200 return q
200 return q
201
201
202 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
202 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
203 opened_by=None):
203 opened_by=None):
204 """
204 """
205 Count the number of pull requests for a specific repository.
205 Count the number of pull requests for a specific repository.
206
206
207 :param repo_name: target or source repo
207 :param repo_name: target or source repo
208 :param search_q: filter by text
208 :param search_q: filter by text
209 :param source: boolean flag to specify if repo_name refers to source
209 :param source: boolean flag to specify if repo_name refers to source
210 :param statuses: list of pull request statuses
210 :param statuses: list of pull request statuses
211 :param opened_by: author user of the pull request
211 :param opened_by: author user of the pull request
212 :returns: int number of pull requests
212 :returns: int number of pull requests
213 """
213 """
214 q = self._prepare_get_all_query(
214 q = self._prepare_get_all_query(
215 repo_name, search_q=search_q, source=source, statuses=statuses,
215 repo_name, search_q=search_q, source=source, statuses=statuses,
216 opened_by=opened_by)
216 opened_by=opened_by)
217
217
218 return q.count()
218 return q.count()
219
219
220 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
220 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
221 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
221 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
222 """
222 """
223 Get all pull requests for a specific repository.
223 Get all pull requests for a specific repository.
224
224
225 :param repo_name: target or source repo
225 :param repo_name: target or source repo
226 :param search_q: filter by text
226 :param search_q: filter by text
227 :param source: boolean flag to specify if repo_name refers to source
227 :param source: boolean flag to specify if repo_name refers to source
228 :param statuses: list of pull request statuses
228 :param statuses: list of pull request statuses
229 :param opened_by: author user of the pull request
229 :param opened_by: author user of the pull request
230 :param offset: pagination offset
230 :param offset: pagination offset
231 :param length: length of returned list
231 :param length: length of returned list
232 :param order_by: order of the returned list
232 :param order_by: order of the returned list
233 :param order_dir: 'asc' or 'desc' ordering direction
233 :param order_dir: 'asc' or 'desc' ordering direction
234 :returns: list of pull requests
234 :returns: list of pull requests
235 """
235 """
236 q = self._prepare_get_all_query(
236 q = self._prepare_get_all_query(
237 repo_name, search_q=search_q, source=source, statuses=statuses,
237 repo_name, search_q=search_q, source=source, statuses=statuses,
238 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
238 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
239
239
240 if length:
240 if length:
241 pull_requests = q.limit(length).offset(offset).all()
241 pull_requests = q.limit(length).offset(offset).all()
242 else:
242 else:
243 pull_requests = q.all()
243 pull_requests = q.all()
244
244
245 return pull_requests
245 return pull_requests
246
246
247 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
247 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
248 opened_by=None):
248 opened_by=None):
249 """
249 """
250 Count the number of pull requests for a specific repository that are
250 Count the number of pull requests for a specific repository that are
251 awaiting review.
251 awaiting review.
252
252
253 :param repo_name: target or source repo
253 :param repo_name: target or source repo
254 :param search_q: filter by text
254 :param search_q: filter by text
255 :param source: boolean flag to specify if repo_name refers to source
255 :param source: boolean flag to specify if repo_name refers to source
256 :param statuses: list of pull request statuses
256 :param statuses: list of pull request statuses
257 :param opened_by: author user of the pull request
257 :param opened_by: author user of the pull request
258 :returns: int number of pull requests
258 :returns: int number of pull requests
259 """
259 """
260 pull_requests = self.get_awaiting_review(
260 pull_requests = self.get_awaiting_review(
261 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
261 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
262
262
263 return len(pull_requests)
263 return len(pull_requests)
264
264
265 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
265 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
266 opened_by=None, offset=0, length=None,
266 opened_by=None, offset=0, length=None,
267 order_by=None, order_dir='desc'):
267 order_by=None, order_dir='desc'):
268 """
268 """
269 Get all pull requests for a specific repository that are awaiting
269 Get all pull requests for a specific repository that are awaiting
270 review.
270 review.
271
271
272 :param repo_name: target or source repo
272 :param repo_name: target or source repo
273 :param search_q: filter by text
273 :param search_q: filter by text
274 :param source: boolean flag to specify if repo_name refers to source
274 :param source: boolean flag to specify if repo_name refers to source
275 :param statuses: list of pull request statuses
275 :param statuses: list of pull request statuses
276 :param opened_by: author user of the pull request
276 :param opened_by: author user of the pull request
277 :param offset: pagination offset
277 :param offset: pagination offset
278 :param length: length of returned list
278 :param length: length of returned list
279 :param order_by: order of the returned list
279 :param order_by: order of the returned list
280 :param order_dir: 'asc' or 'desc' ordering direction
280 :param order_dir: 'asc' or 'desc' ordering direction
281 :returns: list of pull requests
281 :returns: list of pull requests
282 """
282 """
283 pull_requests = self.get_all(
283 pull_requests = self.get_all(
284 repo_name, search_q=search_q, source=source, statuses=statuses,
284 repo_name, search_q=search_q, source=source, statuses=statuses,
285 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
285 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
286
286
287 _filtered_pull_requests = []
287 _filtered_pull_requests = []
288 for pr in pull_requests:
288 for pr in pull_requests:
289 status = pr.calculated_review_status()
289 status = pr.calculated_review_status()
290 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
290 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
291 ChangesetStatus.STATUS_UNDER_REVIEW]:
291 ChangesetStatus.STATUS_UNDER_REVIEW]:
292 _filtered_pull_requests.append(pr)
292 _filtered_pull_requests.append(pr)
293 if length:
293 if length:
294 return _filtered_pull_requests[offset:offset+length]
294 return _filtered_pull_requests[offset:offset+length]
295 else:
295 else:
296 return _filtered_pull_requests
296 return _filtered_pull_requests
297
297
298 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
298 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
299 opened_by=None, user_id=None):
299 opened_by=None, user_id=None):
300 """
300 """
301 Count the number of pull requests for a specific repository that are
301 Count the number of pull requests for a specific repository that are
302 awaiting review from a specific user.
302 awaiting review from a specific user.
303
303
304 :param repo_name: target or source repo
304 :param repo_name: target or source repo
305 :param search_q: filter by text
305 :param search_q: filter by text
306 :param source: boolean flag to specify if repo_name refers to source
306 :param source: boolean flag to specify if repo_name refers to source
307 :param statuses: list of pull request statuses
307 :param statuses: list of pull request statuses
308 :param opened_by: author user of the pull request
308 :param opened_by: author user of the pull request
309 :param user_id: reviewer user of the pull request
309 :param user_id: reviewer user of the pull request
310 :returns: int number of pull requests
310 :returns: int number of pull requests
311 """
311 """
312 pull_requests = self.get_awaiting_my_review(
312 pull_requests = self.get_awaiting_my_review(
313 repo_name, search_q=search_q, source=source, statuses=statuses,
313 repo_name, search_q=search_q, source=source, statuses=statuses,
314 opened_by=opened_by, user_id=user_id)
314 opened_by=opened_by, user_id=user_id)
315
315
316 return len(pull_requests)
316 return len(pull_requests)
317
317
318 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
318 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
319 opened_by=None, user_id=None, offset=0,
319 opened_by=None, user_id=None, offset=0,
320 length=None, order_by=None, order_dir='desc'):
320 length=None, order_by=None, order_dir='desc'):
321 """
321 """
322 Get all pull requests for a specific repository that are awaiting
322 Get all pull requests for a specific repository that are awaiting
323 review from a specific user.
323 review from a specific user.
324
324
325 :param repo_name: target or source repo
325 :param repo_name: target or source repo
326 :param search_q: filter by text
326 :param search_q: filter by text
327 :param source: boolean flag to specify if repo_name refers to source
327 :param source: boolean flag to specify if repo_name refers to source
328 :param statuses: list of pull request statuses
328 :param statuses: list of pull request statuses
329 :param opened_by: author user of the pull request
329 :param opened_by: author user of the pull request
330 :param user_id: reviewer user of the pull request
330 :param user_id: reviewer user of the pull request
331 :param offset: pagination offset
331 :param offset: pagination offset
332 :param length: length of returned list
332 :param length: length of returned list
333 :param order_by: order of the returned list
333 :param order_by: order of the returned list
334 :param order_dir: 'asc' or 'desc' ordering direction
334 :param order_dir: 'asc' or 'desc' ordering direction
335 :returns: list of pull requests
335 :returns: list of pull requests
336 """
336 """
337 pull_requests = self.get_all(
337 pull_requests = self.get_all(
338 repo_name, search_q=search_q, source=source, statuses=statuses,
338 repo_name, search_q=search_q, source=source, statuses=statuses,
339 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
339 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
340
340
341 _my = PullRequestModel().get_not_reviewed(user_id)
341 _my = PullRequestModel().get_not_reviewed(user_id)
342 my_participation = []
342 my_participation = []
343 for pr in pull_requests:
343 for pr in pull_requests:
344 if pr in _my:
344 if pr in _my:
345 my_participation.append(pr)
345 my_participation.append(pr)
346 _filtered_pull_requests = my_participation
346 _filtered_pull_requests = my_participation
347 if length:
347 if length:
348 return _filtered_pull_requests[offset:offset+length]
348 return _filtered_pull_requests[offset:offset+length]
349 else:
349 else:
350 return _filtered_pull_requests
350 return _filtered_pull_requests
351
351
352 def get_not_reviewed(self, user_id):
352 def get_not_reviewed(self, user_id):
353 return [
353 return [
354 x.pull_request for x in PullRequestReviewers.query().filter(
354 x.pull_request for x in PullRequestReviewers.query().filter(
355 PullRequestReviewers.user_id == user_id).all()
355 PullRequestReviewers.user_id == user_id).all()
356 ]
356 ]
357
357
358 def _prepare_participating_query(self, user_id=None, statuses=None,
358 def _prepare_participating_query(self, user_id=None, statuses=None,
359 order_by=None, order_dir='desc'):
359 order_by=None, order_dir='desc'):
360 q = PullRequest.query()
360 q = PullRequest.query()
361 if user_id:
361 if user_id:
362 reviewers_subquery = Session().query(
362 reviewers_subquery = Session().query(
363 PullRequestReviewers.pull_request_id).filter(
363 PullRequestReviewers.pull_request_id).filter(
364 PullRequestReviewers.user_id == user_id).subquery()
364 PullRequestReviewers.user_id == user_id).subquery()
365 user_filter = or_(
365 user_filter = or_(
366 PullRequest.user_id == user_id,
366 PullRequest.user_id == user_id,
367 PullRequest.pull_request_id.in_(reviewers_subquery)
367 PullRequest.pull_request_id.in_(reviewers_subquery)
368 )
368 )
369 q = PullRequest.query().filter(user_filter)
369 q = PullRequest.query().filter(user_filter)
370
370
371 # closed,opened
371 # closed,opened
372 if statuses:
372 if statuses:
373 q = q.filter(PullRequest.status.in_(statuses))
373 q = q.filter(PullRequest.status.in_(statuses))
374
374
375 if order_by:
375 if order_by:
376 order_map = {
376 order_map = {
377 'name_raw': PullRequest.pull_request_id,
377 'name_raw': PullRequest.pull_request_id,
378 'title': PullRequest.title,
378 'title': PullRequest.title,
379 'updated_on_raw': PullRequest.updated_on,
379 'updated_on_raw': PullRequest.updated_on,
380 'target_repo': PullRequest.target_repo_id
380 'target_repo': PullRequest.target_repo_id
381 }
381 }
382 if order_dir == 'asc':
382 if order_dir == 'asc':
383 q = q.order_by(order_map[order_by].asc())
383 q = q.order_by(order_map[order_by].asc())
384 else:
384 else:
385 q = q.order_by(order_map[order_by].desc())
385 q = q.order_by(order_map[order_by].desc())
386
386
387 return q
387 return q
388
388
389 def count_im_participating_in(self, user_id=None, statuses=None):
389 def count_im_participating_in(self, user_id=None, statuses=None):
390 q = self._prepare_participating_query(user_id, statuses=statuses)
390 q = self._prepare_participating_query(user_id, statuses=statuses)
391 return q.count()
391 return q.count()
392
392
393 def get_im_participating_in(
393 def get_im_participating_in(
394 self, user_id=None, statuses=None, offset=0,
394 self, user_id=None, statuses=None, offset=0,
395 length=None, order_by=None, order_dir='desc'):
395 length=None, order_by=None, order_dir='desc'):
396 """
396 """
397 Get all Pull requests that i'm participating in, or i have opened
397 Get all Pull requests that i'm participating in, or i have opened
398 """
398 """
399
399
400 q = self._prepare_participating_query(
400 q = self._prepare_participating_query(
401 user_id, statuses=statuses, order_by=order_by,
401 user_id, statuses=statuses, order_by=order_by,
402 order_dir=order_dir)
402 order_dir=order_dir)
403
403
404 if length:
404 if length:
405 pull_requests = q.limit(length).offset(offset).all()
405 pull_requests = q.limit(length).offset(offset).all()
406 else:
406 else:
407 pull_requests = q.all()
407 pull_requests = q.all()
408
408
409 return pull_requests
409 return pull_requests
410
410
411 def get_versions(self, pull_request):
411 def get_versions(self, pull_request):
412 """
412 """
413 returns version of pull request sorted by ID descending
413 returns version of pull request sorted by ID descending
414 """
414 """
415 return PullRequestVersion.query()\
415 return PullRequestVersion.query()\
416 .filter(PullRequestVersion.pull_request == pull_request)\
416 .filter(PullRequestVersion.pull_request == pull_request)\
417 .order_by(PullRequestVersion.pull_request_version_id.asc())\
417 .order_by(PullRequestVersion.pull_request_version_id.asc())\
418 .all()
418 .all()
419
419
def get_pr_version(self, pull_request_id, version=None):
    """
    Resolve a pull request together with the object representing it at a
    given version.

    :param pull_request_id: database id of the pull request
    :param version: ``None`` for the live object, the string ``'latest'``
        for the newest state, or a ``PullRequestVersion`` id
    :return: tuple of (original pull request, version object,
        display object, at_version marker)
    """
    if version == 'latest':
        # 'latest' maps to the live pull request itself
        pull_request_obj = PullRequest.get(pull_request_id)
        org_pull_request_obj = pull_request_obj
        at_version = 'latest'
    elif version:
        # a concrete historical version was requested
        version_obj = PullRequestVersion.get_or_404(version)
        pull_request_obj = version_obj
        org_pull_request_obj = version_obj.pull_request
        at_version = version_obj.pull_request_version_id
    else:
        pull_request_obj = PullRequest.get_or_404(pull_request_id)
        org_pull_request_obj = pull_request_obj
        at_version = None

    pull_request_display_obj = PullRequest.get_pr_display_object(
        pull_request_obj, org_pull_request_obj)

    return (org_pull_request_obj, pull_request_obj,
            pull_request_display_obj, at_version)
442
442
def create(self, created_by, source_repo, source_ref, target_repo,
           target_ref, revisions, reviewers, title, description=None,
           description_renderer=None,
           reviewer_data=None, translator=None, auth_user=None):
    """
    Create a new pull request, attach its reviewers, set the initial
    "Under Review" status on its commits and run a first merge simulation.

    :param created_by: user (or user id/username) creating the pull request
    :param source_repo: repository (or repo name/id) the changes come from
    :param source_ref: source ref string
    :param target_repo: repository the changes should be merged into
    :param target_ref: target ref string
    :param revisions: list of commit ids included in the pull request
    :param reviewers: iterable of (user_id, reasons, mandatory, rules)
    :param title: pull request title
    :param description: optional description text
    :param description_renderer: renderer used for the description
    :param reviewer_data: serialized reviewer-rule data stored on the PR
    :param translator: translation callable; defaults to request translator
    :param auth_user: AuthUser performing the action (used for audit log)
    :return: the newly created ``PullRequest``
    """
    translator = translator or get_current_request().translate

    created_by_user = self._get_user(created_by)
    # fall back to acting as the creating user when no explicit auth user
    auth_user = auth_user or created_by_user.AuthUser()
    source_repo = self._get_repo(source_repo)
    target_repo = self._get_repo(target_repo)

    pull_request = PullRequest()
    pull_request.source_repo = source_repo
    pull_request.source_ref = source_ref
    pull_request.target_repo = target_repo
    pull_request.target_ref = target_ref
    pull_request.revisions = revisions
    pull_request.title = title
    pull_request.description = description
    pull_request.description_renderer = description_renderer
    pull_request.author = created_by_user
    pull_request.reviewer_data = reviewer_data
    # PR starts in CREATING state until the merge simulation finishes
    pull_request.pull_request_state = pull_request.STATE_CREATING
    Session().add(pull_request)
    Session().flush()

    reviewer_ids = set()
    # members / reviewers
    for reviewer_object in reviewers:
        user_id, reasons, mandatory, rules = reviewer_object
        user = self._get_user(user_id)

        # skip duplicates
        if user.user_id in reviewer_ids:
            continue

        reviewer_ids.add(user.user_id)

        reviewer = PullRequestReviewers()
        reviewer.user = user
        reviewer.pull_request = pull_request
        reviewer.reasons = reasons
        reviewer.mandatory = mandatory

        # NOTE(marcink): pick only first rule for now
        rule_id = list(rules)[0] if rules else None
        rule = RepoReviewRule.get(rule_id) if rule_id else None
        if rule:
            review_group = rule.user_group_vote_rule(user_id)
            # we check if this particular reviewer is member of a voting group
            if review_group:
                # NOTE(marcink):
                # can be that user is member of more but we pick the first same,
                # same as default reviewers algo
                review_group = review_group[0]

                # snapshot of the group-rule that made this user a reviewer;
                # stored on the reviewer row for later display/auditing
                rule_data = {
                    'rule_name':
                        rule.review_rule_name,
                    'rule_user_group_entry_id':
                        review_group.repo_review_rule_users_group_id,
                    'rule_user_group_name':
                        review_group.users_group.users_group_name,
                    'rule_user_group_members':
                        [x.user.username for x in review_group.users_group.members],
                    'rule_user_group_members_id':
                        [x.user.user_id for x in review_group.users_group.members],
                }
                # e.g {'vote_rule': -1, 'mandatory': True}
                rule_data.update(review_group.rule_data())

                reviewer.rule_data = rule_data

        Session().add(reviewer)
        Session().flush()

    # Set approval status to "Under Review" for all commits which are
    # part of this pull request.
    ChangesetStatusModel().set_status(
        repo=target_repo,
        status=ChangesetStatus.STATUS_UNDER_REVIEW,
        user=created_by_user,
        pull_request=pull_request
    )
    # we commit early at this point. This has to do with a fact
    # that before queries do some row-locking. And because of that
    # we need to commit and finish transaction before below validate call
    # that for large repos could be long resulting in long row locks
    Session().commit()

    # prepare workspace, and run initial merge simulation. Set state during that
    # operation
    pull_request = PullRequest.get(pull_request.pull_request_id)

    # set as merging, for merge simulation, and if finished to created so we mark
    # simulation is working fine
    with pull_request.set_state(PullRequest.STATE_MERGING,
                                final_state=PullRequest.STATE_CREATED) as state_obj:
        MergeCheck.validate(
            pull_request, auth_user=auth_user, translator=translator)

    self.notify_reviewers(pull_request, reviewer_ids)
    self.trigger_pull_request_hook(
        pull_request, created_by_user, 'create')

    creation_data = pull_request.get_api_data(with_merge_state=False)
    self._log_audit_action(
        'repo.pull_request.create', {'data': creation_data},
        auth_user, pull_request)

    return pull_request
554
554
def trigger_pull_request_hook(self, pull_request, user, action, data=None):
    """
    Fire the vcs hook matching *action* for the given pull request.

    ``'comment'`` only triggers the internal comment event (there is no
    dedicated vcs hook for it); unknown actions are silently ignored.
    """
    pull_request = self.__get_pull_request(pull_request)
    target_scm = pull_request.target_repo.scm_instance()

    hook_dispatch = {
        'create': hooks_utils.trigger_log_create_pull_request_hook,
        'merge': hooks_utils.trigger_log_merge_pull_request_hook,
        'close': hooks_utils.trigger_log_close_pull_request_hook,
        'review_status_change': hooks_utils.trigger_log_review_pull_request_hook,
        'update': hooks_utils.trigger_log_update_pull_request_hook,
    }

    if action == 'comment':
        # dummy hook ! for comment. We want this function to handle all cases
        def trigger_hook(*args, **kwargs):
            pass
        events.trigger(
            events.PullRequestCommentEvent(pull_request, data['comment']))
    elif action in hook_dispatch:
        trigger_hook = hook_dispatch[action]
    else:
        return

    trigger_hook(
        username=user.username,
        repo_name=pull_request.target_repo.repo_name,
        repo_alias=target_scm.alias,
        pull_request=pull_request,
        data=data)
583
583
def _get_commit_ids(self, pull_request):
    """
    Return the commit ids of the merged pull request, appending the merge
    commit itself when it is not already part of the revisions.

    This method is not dealing correctly yet with the lack of autoupdates
    nor with the implicit target updates.
    For example: if a commit in the source repo is already in the target it
    will be reported anyways.

    :raises ValueError: if the pull request has not been merged yet
    """
    if pull_request.merge_rev is None:
        raise ValueError('This pull request was not merged yet')

    commit_ids = list(pull_request.revisions)
    if pull_request.merge_rev not in commit_ids:
        commit_ids.append(pull_request.merge_rev)
    return commit_ids
602
602
def merge_repo(self, pull_request, user, extras):
    """
    Execute the merge of *pull_request* into its target repository.

    On success the pull request gets a closing comment, is marked as
    merged and an audit log entry is written; on failure the pull request
    is left untouched.

    :param pull_request: pull request instance to merge
    :param user: user performing the merge
    :param extras: dict of extra data passed down to the vcs hooks
    :return: merge state object describing the outcome
    """
    log.debug("Merging pull request %s", pull_request.pull_request_id)
    extras['user_agent'] = 'internal-merge'
    merge_state = self._merge_pull_request(pull_request, user, extras)
    if merge_state.executed:
        log.debug("Merge was successful, updating the pull request comments.")
        self._comment_and_close_pr(pull_request, user, merge_state)

        self._log_audit_action(
            'repo.pull_request.merge',
            {'merge_state': merge_state.__dict__},
            user, pull_request)

    else:
        # Logger.warn is a deprecated alias; use warning()
        log.warning("Merge failed, not updating the pull request.")
    return merge_state
619
619
def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
    """
    Perform the actual vcs-level merge of the pull request.

    Renders the merge commit message from ``MERGE_MESSAGE_TMPL`` (unless
    *merge_msg* overrides it), refreshes the target reference, starts the
    hooks callback daemon and delegates to ``target_vcs.merge``.

    :param pull_request: pull request to merge
    :param user: user performing the merge (used for commit author data)
    :param extras: hook-environment dict; enriched by the callback daemon
    :param merge_msg: optional custom merge commit message template
    :return: merge state object returned by the vcs backend
    """
    target_vcs = pull_request.target_repo.scm_instance()
    source_vcs = pull_request.source_repo.scm_instance()

    message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
        pr_id=pull_request.pull_request_id,
        pr_title=pull_request.title,
        source_repo=source_vcs.name,
        source_ref_name=pull_request.source_ref_parts.name,
        target_repo=target_vcs.name,
        target_ref_name=pull_request.target_ref_parts.name,
    )

    workspace_id = self._workspace_id(pull_request)
    repo_id = pull_request.target_repo.repo_id
    use_rebase = self._use_rebase_for_merging(pull_request)
    close_branch = self._close_branch_before_merging(pull_request)
    user_name = self._user_name_for_merging(pull_request, user)

    # make sure the target ref points at the current tip before merging
    target_ref = self._refresh_reference(
        pull_request.target_ref_parts, target_vcs)

    # the daemon serves hook callbacks for the merge operation; it also
    # augments `extras` with connection details for the hooks
    callback_daemon, extras = prepare_callback_daemon(
        extras, protocol=vcs_settings.HOOKS_PROTOCOL,
        host=vcs_settings.HOOKS_HOST,
        use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

    with callback_daemon:
        # TODO: johbo: Implement a clean way to run a config_override
        # for a single call.
        target_vcs.config.set(
            'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

        merge_state = target_vcs.merge(
            repo_id, workspace_id, target_ref, source_vcs,
            pull_request.source_ref_parts,
            user_name=user_name, user_email=user.email,
            message=message, use_rebase=use_rebase,
            close_branch=close_branch)
    return merge_state
660
660
def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
    """
    Finalize a successfully merged pull request: record the merge commit,
    leave a closing comment, invalidate the target repo caches and fire
    the 'merge' hook.
    """
    pull_request.merge_rev = merge_state.merge_ref.commit_id
    pull_request.updated_on = datetime.datetime.now()

    comment_text = close_msg or 'Pull request merged and closed'
    CommentsModel().create(
        text=safe_unicode(comment_text),
        repo=pull_request.target_repo.repo_id,
        user=user.user_id,
        pull_request=pull_request.pull_request_id,
        f_path=None,
        line_no=None,
        closing_pr=True,
    )

    Session().add(pull_request)
    Session().flush()
    # TODO: paris: replace invalidation with less radical solution
    ScmModel().mark_for_invalidation(pull_request.target_repo.repo_name)
    self.trigger_pull_request_hook(pull_request, user, 'merge')
682
682
683 def has_valid_update_type(self, pull_request):
683 def has_valid_update_type(self, pull_request):
684 source_ref_type = pull_request.source_ref_parts.type
684 source_ref_type = pull_request.source_ref_parts.type
685 return source_ref_type in self.REF_TYPES
685 return source_ref_type in self.REF_TYPES
686
686
def update_commits(self, pull_request, updating_user):
    """
    Get the updated list of commits for the pull request
    and return the new pull request version and the list
    of commits processed by this update action.

    :param pull_request: pull request (or its id) to update
    :param updating_user: the user_object who triggered the update
    :return: ``UpdateResponse`` describing what changed (or why nothing
        was updated)
    """
    pull_request = self.__get_pull_request(pull_request)
    source_ref_type = pull_request.source_ref_parts.type
    source_ref_name = pull_request.source_ref_parts.name
    source_ref_id = pull_request.source_ref_parts.commit_id

    target_ref_type = pull_request.target_ref_parts.type
    target_ref_name = pull_request.target_ref_parts.name
    target_ref_id = pull_request.target_ref_parts.commit_id

    if not self.has_valid_update_type(pull_request):
        log.debug("Skipping update of pull request %s due to ref type: %s",
                  pull_request, source_ref_type)
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.WRONG_REF_TYPE,
            old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
            source_changed=False, target_changed=False)

    # source repo
    source_repo = pull_request.source_repo.scm_instance()

    try:
        source_commit = source_repo.get_commit(commit_id=source_ref_name)
    except CommitDoesNotExistError:
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.MISSING_SOURCE_REF,
            old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
            source_changed=False, target_changed=False)

    source_changed = source_ref_id != source_commit.raw_id

    # target repo
    target_repo = pull_request.target_repo.scm_instance()

    try:
        target_commit = target_repo.get_commit(commit_id=target_ref_name)
    except CommitDoesNotExistError:
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.MISSING_TARGET_REF,
            old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
            source_changed=False, target_changed=False)
    target_changed = target_ref_id != target_commit.raw_id

    if not (source_changed or target_changed):
        log.debug("Nothing changed in pull request %s", pull_request)
        # NOTE(review): both flags are False here; the original code had
        # the two kwargs swapped which was harmless but misleading
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.NO_CHANGE,
            old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
            source_changed=source_changed, target_changed=target_changed)

    change_in_found = 'target repo' if target_changed else 'source repo'
    log.debug('Updating pull request because of change in %s detected',
              change_in_found)

    # Finally there is a need for an update, in case of source change
    # we create a new version, else just an update
    if source_changed:
        pull_request_version = self._create_version_from_snapshot(pull_request)
        self._link_comments_to_version(pull_request_version)
    else:
        try:
            ver = pull_request.versions[-1]
        except IndexError:
            ver = None

        pull_request.pull_request_version_id = \
            ver.pull_request_version_id if ver else None
        pull_request_version = pull_request

    try:
        if target_ref_type in self.REF_TYPES:
            target_commit = target_repo.get_commit(target_ref_name)
        else:
            target_commit = target_repo.get_commit(target_ref_id)
    except CommitDoesNotExistError:
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.MISSING_TARGET_REF,
            old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
            source_changed=source_changed, target_changed=target_changed)

    # re-compute commit ids
    old_commit_ids = pull_request.revisions
    pre_load = ["author", "date", "message", "branch"]
    commit_ranges = target_repo.compare(
        target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
        pre_load=pre_load)

    ancestor_commit_id = source_repo.get_common_ancestor(
        source_commit.raw_id, target_commit.raw_id, target_repo)

    pull_request.source_ref = '%s:%s:%s' % (
        source_ref_type, source_ref_name, source_commit.raw_id)
    pull_request.target_ref = '%s:%s:%s' % (
        target_ref_type, target_ref_name, ancestor_commit_id)

    pull_request.revisions = [
        commit.raw_id for commit in reversed(commit_ranges)]
    pull_request.updated_on = datetime.datetime.now()
    Session().add(pull_request)
    new_commit_ids = pull_request.revisions

    old_diff_data, new_diff_data = self._generate_update_diffs(
        pull_request, pull_request_version)

    # calculate commit and file changes
    commit_changes = self._calculate_commit_id_changes(
        old_commit_ids, new_commit_ids)
    file_changes = self._calculate_file_changes(
        old_diff_data, new_diff_data)

    # set comments as outdated if DIFFS changed
    CommentsModel().outdate_comments(
        pull_request, old_diff_data=old_diff_data,
        new_diff_data=new_diff_data)

    valid_commit_changes = (commit_changes.added or commit_changes.removed)
    file_node_changes = (
        file_changes.added or file_changes.modified or file_changes.removed)
    pr_has_changes = valid_commit_changes or file_node_changes

    # Add an automatic comment to the pull request, in case
    # anything has changed
    if pr_has_changes:
        update_comment = CommentsModel().create(
            text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
            repo=pull_request.target_repo,
            user=pull_request.author,
            pull_request=pull_request,
            send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

        # Update status to "Under Review" for added commits
        for commit_id in commit_changes.added:
            ChangesetStatusModel().set_status(
                repo=pull_request.source_repo,
                status=ChangesetStatus.STATUS_UNDER_REVIEW,
                comment=update_comment,
                user=pull_request.author,
                pull_request=pull_request,
                revision=commit_id)

    # send update email to users
    try:
        self.notify_users(pull_request=pull_request, updating_user=updating_user,
                          ancestor_commit_id=ancestor_commit_id,
                          commit_changes=commit_changes,
                          file_changes=file_changes)
    except Exception:
        log.exception('Failed to send email notification to users')

    log.debug(
        'Updated pull request %s, added_ids: %s, common_ids: %s, '
        'removed_ids: %s', pull_request.pull_request_id,
        commit_changes.added, commit_changes.common, commit_changes.removed)
    log.debug(
        'Updated pull request with the following file changes: %s',
        file_changes)

    log.info(
        "Updated pull request %s from commit %s to commit %s, "
        "stored new version %s of this pull request.",
        pull_request.pull_request_id, source_ref_id,
        pull_request.source_ref_parts.commit_id,
        pull_request_version.pull_request_version_id)
    Session().commit()
    self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

    return UpdateResponse(
        executed=True, reason=UpdateFailureReason.NONE,
        old=pull_request, new=pull_request_version,
        common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
        source_changed=source_changed, target_changed=target_changed)
870
870
871 def _create_version_from_snapshot(self, pull_request):
871 def _create_version_from_snapshot(self, pull_request):
872 version = PullRequestVersion()
872 version = PullRequestVersion()
873 version.title = pull_request.title
873 version.title = pull_request.title
874 version.description = pull_request.description
874 version.description = pull_request.description
875 version.status = pull_request.status
875 version.status = pull_request.status
876 version.pull_request_state = pull_request.pull_request_state
876 version.pull_request_state = pull_request.pull_request_state
877 version.created_on = datetime.datetime.now()
877 version.created_on = datetime.datetime.now()
878 version.updated_on = pull_request.updated_on
878 version.updated_on = pull_request.updated_on
879 version.user_id = pull_request.user_id
879 version.user_id = pull_request.user_id
880 version.source_repo = pull_request.source_repo
880 version.source_repo = pull_request.source_repo
881 version.source_ref = pull_request.source_ref
881 version.source_ref = pull_request.source_ref
882 version.target_repo = pull_request.target_repo
882 version.target_repo = pull_request.target_repo
883 version.target_ref = pull_request.target_ref
883 version.target_ref = pull_request.target_ref
884
884
885 version._last_merge_source_rev = pull_request._last_merge_source_rev
885 version._last_merge_source_rev = pull_request._last_merge_source_rev
886 version._last_merge_target_rev = pull_request._last_merge_target_rev
886 version._last_merge_target_rev = pull_request._last_merge_target_rev
887 version.last_merge_status = pull_request.last_merge_status
887 version.last_merge_status = pull_request.last_merge_status
888 version.last_merge_metadata = pull_request.last_merge_metadata
888 version.shadow_merge_ref = pull_request.shadow_merge_ref
889 version.shadow_merge_ref = pull_request.shadow_merge_ref
889 version.merge_rev = pull_request.merge_rev
890 version.merge_rev = pull_request.merge_rev
890 version.reviewer_data = pull_request.reviewer_data
891 version.reviewer_data = pull_request.reviewer_data
891
892
892 version.revisions = pull_request.revisions
893 version.revisions = pull_request.revisions
893 version.pull_request = pull_request
894 version.pull_request = pull_request
894 Session().add(version)
895 Session().add(version)
895 Session().flush()
896 Session().flush()
896
897
897 return version
898 return version
898
899
899 def _generate_update_diffs(self, pull_request, pull_request_version):
900 def _generate_update_diffs(self, pull_request, pull_request_version):
900
901
901 diff_context = (
902 diff_context = (
902 self.DIFF_CONTEXT +
903 self.DIFF_CONTEXT +
903 CommentsModel.needed_extra_diff_context())
904 CommentsModel.needed_extra_diff_context())
904 hide_whitespace_changes = False
905 hide_whitespace_changes = False
905 source_repo = pull_request_version.source_repo
906 source_repo = pull_request_version.source_repo
906 source_ref_id = pull_request_version.source_ref_parts.commit_id
907 source_ref_id = pull_request_version.source_ref_parts.commit_id
907 target_ref_id = pull_request_version.target_ref_parts.commit_id
908 target_ref_id = pull_request_version.target_ref_parts.commit_id
908 old_diff = self._get_diff_from_pr_or_version(
909 old_diff = self._get_diff_from_pr_or_version(
909 source_repo, source_ref_id, target_ref_id,
910 source_repo, source_ref_id, target_ref_id,
910 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
911 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
911
912
912 source_repo = pull_request.source_repo
913 source_repo = pull_request.source_repo
913 source_ref_id = pull_request.source_ref_parts.commit_id
914 source_ref_id = pull_request.source_ref_parts.commit_id
914 target_ref_id = pull_request.target_ref_parts.commit_id
915 target_ref_id = pull_request.target_ref_parts.commit_id
915
916
916 new_diff = self._get_diff_from_pr_or_version(
917 new_diff = self._get_diff_from_pr_or_version(
917 source_repo, source_ref_id, target_ref_id,
918 source_repo, source_ref_id, target_ref_id,
918 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
919 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
919
920
920 old_diff_data = diffs.DiffProcessor(old_diff)
921 old_diff_data = diffs.DiffProcessor(old_diff)
921 old_diff_data.prepare()
922 old_diff_data.prepare()
922 new_diff_data = diffs.DiffProcessor(new_diff)
923 new_diff_data = diffs.DiffProcessor(new_diff)
923 new_diff_data.prepare()
924 new_diff_data.prepare()
924
925
925 return old_diff_data, new_diff_data
926 return old_diff_data, new_diff_data
926
927
927 def _link_comments_to_version(self, pull_request_version):
928 def _link_comments_to_version(self, pull_request_version):
928 """
929 """
929 Link all unlinked comments of this pull request to the given version.
930 Link all unlinked comments of this pull request to the given version.
930
931
931 :param pull_request_version: The `PullRequestVersion` to which
932 :param pull_request_version: The `PullRequestVersion` to which
932 the comments shall be linked.
933 the comments shall be linked.
933
934
934 """
935 """
935 pull_request = pull_request_version.pull_request
936 pull_request = pull_request_version.pull_request
936 comments = ChangesetComment.query()\
937 comments = ChangesetComment.query()\
937 .filter(
938 .filter(
938 # TODO: johbo: Should we query for the repo at all here?
939 # TODO: johbo: Should we query for the repo at all here?
939 # Pending decision on how comments of PRs are to be related
940 # Pending decision on how comments of PRs are to be related
940 # to either the source repo, the target repo or no repo at all.
941 # to either the source repo, the target repo or no repo at all.
941 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
942 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
942 ChangesetComment.pull_request == pull_request,
943 ChangesetComment.pull_request == pull_request,
943 ChangesetComment.pull_request_version == None)\
944 ChangesetComment.pull_request_version == None)\
944 .order_by(ChangesetComment.comment_id.asc())
945 .order_by(ChangesetComment.comment_id.asc())
945
946
946 # TODO: johbo: Find out why this breaks if it is done in a bulk
947 # TODO: johbo: Find out why this breaks if it is done in a bulk
947 # operation.
948 # operation.
948 for comment in comments:
949 for comment in comments:
949 comment.pull_request_version_id = (
950 comment.pull_request_version_id = (
950 pull_request_version.pull_request_version_id)
951 pull_request_version.pull_request_version_id)
951 Session().add(comment)
952 Session().add(comment)
952
953
953 def _calculate_commit_id_changes(self, old_ids, new_ids):
954 def _calculate_commit_id_changes(self, old_ids, new_ids):
954 added = [x for x in new_ids if x not in old_ids]
955 added = [x for x in new_ids if x not in old_ids]
955 common = [x for x in new_ids if x in old_ids]
956 common = [x for x in new_ids if x in old_ids]
956 removed = [x for x in old_ids if x not in new_ids]
957 removed = [x for x in old_ids if x not in new_ids]
957 total = new_ids
958 total = new_ids
958 return ChangeTuple(added, common, removed, total)
959 return ChangeTuple(added, common, removed, total)
959
960
960 def _calculate_file_changes(self, old_diff_data, new_diff_data):
961 def _calculate_file_changes(self, old_diff_data, new_diff_data):
961
962
962 old_files = OrderedDict()
963 old_files = OrderedDict()
963 for diff_data in old_diff_data.parsed_diff:
964 for diff_data in old_diff_data.parsed_diff:
964 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
965 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
965
966
966 added_files = []
967 added_files = []
967 modified_files = []
968 modified_files = []
968 removed_files = []
969 removed_files = []
969 for diff_data in new_diff_data.parsed_diff:
970 for diff_data in new_diff_data.parsed_diff:
970 new_filename = diff_data['filename']
971 new_filename = diff_data['filename']
971 new_hash = md5_safe(diff_data['raw_diff'])
972 new_hash = md5_safe(diff_data['raw_diff'])
972
973
973 old_hash = old_files.get(new_filename)
974 old_hash = old_files.get(new_filename)
974 if not old_hash:
975 if not old_hash:
975 # file is not present in old diff, we have to figure out from parsed diff
976 # file is not present in old diff, we have to figure out from parsed diff
976 # operation ADD/REMOVE
977 # operation ADD/REMOVE
977 operations_dict = diff_data['stats']['ops']
978 operations_dict = diff_data['stats']['ops']
978 if diffs.DEL_FILENODE in operations_dict:
979 if diffs.DEL_FILENODE in operations_dict:
979 removed_files.append(new_filename)
980 removed_files.append(new_filename)
980 else:
981 else:
981 added_files.append(new_filename)
982 added_files.append(new_filename)
982 else:
983 else:
983 if new_hash != old_hash:
984 if new_hash != old_hash:
984 modified_files.append(new_filename)
985 modified_files.append(new_filename)
985 # now remove a file from old, since we have seen it already
986 # now remove a file from old, since we have seen it already
986 del old_files[new_filename]
987 del old_files[new_filename]
987
988
988 # removed files is when there are present in old, but not in NEW,
989 # removed files is when there are present in old, but not in NEW,
989 # since we remove old files that are present in new diff, left-overs
990 # since we remove old files that are present in new diff, left-overs
990 # if any should be the removed files
991 # if any should be the removed files
991 removed_files.extend(old_files.keys())
992 removed_files.extend(old_files.keys())
992
993
993 return FileChangeTuple(added_files, modified_files, removed_files)
994 return FileChangeTuple(added_files, modified_files, removed_files)
994
995
995 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
996 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
996 """
997 """
997 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
998 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
998 so it's always looking the same disregarding on which default
999 so it's always looking the same disregarding on which default
999 renderer system is using.
1000 renderer system is using.
1000
1001
1001 :param ancestor_commit_id: ancestor raw_id
1002 :param ancestor_commit_id: ancestor raw_id
1002 :param changes: changes named tuple
1003 :param changes: changes named tuple
1003 :param file_changes: file changes named tuple
1004 :param file_changes: file changes named tuple
1004
1005
1005 """
1006 """
1006 new_status = ChangesetStatus.get_status_lbl(
1007 new_status = ChangesetStatus.get_status_lbl(
1007 ChangesetStatus.STATUS_UNDER_REVIEW)
1008 ChangesetStatus.STATUS_UNDER_REVIEW)
1008
1009
1009 changed_files = (
1010 changed_files = (
1010 file_changes.added + file_changes.modified + file_changes.removed)
1011 file_changes.added + file_changes.modified + file_changes.removed)
1011
1012
1012 params = {
1013 params = {
1013 'under_review_label': new_status,
1014 'under_review_label': new_status,
1014 'added_commits': changes.added,
1015 'added_commits': changes.added,
1015 'removed_commits': changes.removed,
1016 'removed_commits': changes.removed,
1016 'changed_files': changed_files,
1017 'changed_files': changed_files,
1017 'added_files': file_changes.added,
1018 'added_files': file_changes.added,
1018 'modified_files': file_changes.modified,
1019 'modified_files': file_changes.modified,
1019 'removed_files': file_changes.removed,
1020 'removed_files': file_changes.removed,
1020 'ancestor_commit_id': ancestor_commit_id
1021 'ancestor_commit_id': ancestor_commit_id
1021 }
1022 }
1022 renderer = RstTemplateRenderer()
1023 renderer = RstTemplateRenderer()
1023 return renderer.render('pull_request_update.mako', **params)
1024 return renderer.render('pull_request_update.mako', **params)
1024
1025
1025 def edit(self, pull_request, title, description, description_renderer, user):
1026 def edit(self, pull_request, title, description, description_renderer, user):
1026 pull_request = self.__get_pull_request(pull_request)
1027 pull_request = self.__get_pull_request(pull_request)
1027 old_data = pull_request.get_api_data(with_merge_state=False)
1028 old_data = pull_request.get_api_data(with_merge_state=False)
1028 if pull_request.is_closed():
1029 if pull_request.is_closed():
1029 raise ValueError('This pull request is closed')
1030 raise ValueError('This pull request is closed')
1030 if title:
1031 if title:
1031 pull_request.title = title
1032 pull_request.title = title
1032 pull_request.description = description
1033 pull_request.description = description
1033 pull_request.updated_on = datetime.datetime.now()
1034 pull_request.updated_on = datetime.datetime.now()
1034 pull_request.description_renderer = description_renderer
1035 pull_request.description_renderer = description_renderer
1035 Session().add(pull_request)
1036 Session().add(pull_request)
1036 self._log_audit_action(
1037 self._log_audit_action(
1037 'repo.pull_request.edit', {'old_data': old_data},
1038 'repo.pull_request.edit', {'old_data': old_data},
1038 user, pull_request)
1039 user, pull_request)
1039
1040
1040 def update_reviewers(self, pull_request, reviewer_data, user):
1041 def update_reviewers(self, pull_request, reviewer_data, user):
1041 """
1042 """
1042 Update the reviewers in the pull request
1043 Update the reviewers in the pull request
1043
1044
1044 :param pull_request: the pr to update
1045 :param pull_request: the pr to update
1045 :param reviewer_data: list of tuples
1046 :param reviewer_data: list of tuples
1046 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1047 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1047 """
1048 """
1048 pull_request = self.__get_pull_request(pull_request)
1049 pull_request = self.__get_pull_request(pull_request)
1049 if pull_request.is_closed():
1050 if pull_request.is_closed():
1050 raise ValueError('This pull request is closed')
1051 raise ValueError('This pull request is closed')
1051
1052
1052 reviewers = {}
1053 reviewers = {}
1053 for user_id, reasons, mandatory, rules in reviewer_data:
1054 for user_id, reasons, mandatory, rules in reviewer_data:
1054 if isinstance(user_id, (int, compat.string_types)):
1055 if isinstance(user_id, (int, compat.string_types)):
1055 user_id = self._get_user(user_id).user_id
1056 user_id = self._get_user(user_id).user_id
1056 reviewers[user_id] = {
1057 reviewers[user_id] = {
1057 'reasons': reasons, 'mandatory': mandatory}
1058 'reasons': reasons, 'mandatory': mandatory}
1058
1059
1059 reviewers_ids = set(reviewers.keys())
1060 reviewers_ids = set(reviewers.keys())
1060 current_reviewers = PullRequestReviewers.query()\
1061 current_reviewers = PullRequestReviewers.query()\
1061 .filter(PullRequestReviewers.pull_request ==
1062 .filter(PullRequestReviewers.pull_request ==
1062 pull_request).all()
1063 pull_request).all()
1063 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1064 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1064
1065
1065 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1066 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1066 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1067 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1067
1068
1068 log.debug("Adding %s reviewers", ids_to_add)
1069 log.debug("Adding %s reviewers", ids_to_add)
1069 log.debug("Removing %s reviewers", ids_to_remove)
1070 log.debug("Removing %s reviewers", ids_to_remove)
1070 changed = False
1071 changed = False
1071 added_audit_reviewers = []
1072 added_audit_reviewers = []
1072 removed_audit_reviewers = []
1073 removed_audit_reviewers = []
1073
1074
1074 for uid in ids_to_add:
1075 for uid in ids_to_add:
1075 changed = True
1076 changed = True
1076 _usr = self._get_user(uid)
1077 _usr = self._get_user(uid)
1077 reviewer = PullRequestReviewers()
1078 reviewer = PullRequestReviewers()
1078 reviewer.user = _usr
1079 reviewer.user = _usr
1079 reviewer.pull_request = pull_request
1080 reviewer.pull_request = pull_request
1080 reviewer.reasons = reviewers[uid]['reasons']
1081 reviewer.reasons = reviewers[uid]['reasons']
1081 # NOTE(marcink): mandatory shouldn't be changed now
1082 # NOTE(marcink): mandatory shouldn't be changed now
1082 # reviewer.mandatory = reviewers[uid]['reasons']
1083 # reviewer.mandatory = reviewers[uid]['reasons']
1083 Session().add(reviewer)
1084 Session().add(reviewer)
1084 added_audit_reviewers.append(reviewer.get_dict())
1085 added_audit_reviewers.append(reviewer.get_dict())
1085
1086
1086 for uid in ids_to_remove:
1087 for uid in ids_to_remove:
1087 changed = True
1088 changed = True
1088 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1089 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1089 # that prevents and fixes cases that we added the same reviewer twice.
1090 # that prevents and fixes cases that we added the same reviewer twice.
1090 # this CAN happen due to the lack of DB checks
1091 # this CAN happen due to the lack of DB checks
1091 reviewers = PullRequestReviewers.query()\
1092 reviewers = PullRequestReviewers.query()\
1092 .filter(PullRequestReviewers.user_id == uid,
1093 .filter(PullRequestReviewers.user_id == uid,
1093 PullRequestReviewers.pull_request == pull_request)\
1094 PullRequestReviewers.pull_request == pull_request)\
1094 .all()
1095 .all()
1095
1096
1096 for obj in reviewers:
1097 for obj in reviewers:
1097 added_audit_reviewers.append(obj.get_dict())
1098 added_audit_reviewers.append(obj.get_dict())
1098 Session().delete(obj)
1099 Session().delete(obj)
1099
1100
1100 if changed:
1101 if changed:
1101 Session().expire_all()
1102 Session().expire_all()
1102 pull_request.updated_on = datetime.datetime.now()
1103 pull_request.updated_on = datetime.datetime.now()
1103 Session().add(pull_request)
1104 Session().add(pull_request)
1104
1105
1105 # finally store audit logs
1106 # finally store audit logs
1106 for user_data in added_audit_reviewers:
1107 for user_data in added_audit_reviewers:
1107 self._log_audit_action(
1108 self._log_audit_action(
1108 'repo.pull_request.reviewer.add', {'data': user_data},
1109 'repo.pull_request.reviewer.add', {'data': user_data},
1109 user, pull_request)
1110 user, pull_request)
1110 for user_data in removed_audit_reviewers:
1111 for user_data in removed_audit_reviewers:
1111 self._log_audit_action(
1112 self._log_audit_action(
1112 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1113 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1113 user, pull_request)
1114 user, pull_request)
1114
1115
1115 self.notify_reviewers(pull_request, ids_to_add)
1116 self.notify_reviewers(pull_request, ids_to_add)
1116 return ids_to_add, ids_to_remove
1117 return ids_to_add, ids_to_remove
1117
1118
1118 def get_url(self, pull_request, request=None, permalink=False):
1119 def get_url(self, pull_request, request=None, permalink=False):
1119 if not request:
1120 if not request:
1120 request = get_current_request()
1121 request = get_current_request()
1121
1122
1122 if permalink:
1123 if permalink:
1123 return request.route_url(
1124 return request.route_url(
1124 'pull_requests_global',
1125 'pull_requests_global',
1125 pull_request_id=pull_request.pull_request_id,)
1126 pull_request_id=pull_request.pull_request_id,)
1126 else:
1127 else:
1127 return request.route_url('pullrequest_show',
1128 return request.route_url('pullrequest_show',
1128 repo_name=safe_str(pull_request.target_repo.repo_name),
1129 repo_name=safe_str(pull_request.target_repo.repo_name),
1129 pull_request_id=pull_request.pull_request_id,)
1130 pull_request_id=pull_request.pull_request_id,)
1130
1131
1131 def get_shadow_clone_url(self, pull_request, request=None):
1132 def get_shadow_clone_url(self, pull_request, request=None):
1132 """
1133 """
1133 Returns qualified url pointing to the shadow repository. If this pull
1134 Returns qualified url pointing to the shadow repository. If this pull
1134 request is closed there is no shadow repository and ``None`` will be
1135 request is closed there is no shadow repository and ``None`` will be
1135 returned.
1136 returned.
1136 """
1137 """
1137 if pull_request.is_closed():
1138 if pull_request.is_closed():
1138 return None
1139 return None
1139 else:
1140 else:
1140 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1141 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1141 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1142 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1142
1143
1143 def notify_reviewers(self, pull_request, reviewers_ids):
1144 def notify_reviewers(self, pull_request, reviewers_ids):
1144 # notification to reviewers
1145 # notification to reviewers
1145 if not reviewers_ids:
1146 if not reviewers_ids:
1146 return
1147 return
1147
1148
1148 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1149 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1149
1150
1150 pull_request_obj = pull_request
1151 pull_request_obj = pull_request
1151 # get the current participants of this pull request
1152 # get the current participants of this pull request
1152 recipients = reviewers_ids
1153 recipients = reviewers_ids
1153 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1154 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1154
1155
1155 pr_source_repo = pull_request_obj.source_repo
1156 pr_source_repo = pull_request_obj.source_repo
1156 pr_target_repo = pull_request_obj.target_repo
1157 pr_target_repo = pull_request_obj.target_repo
1157
1158
1158 pr_url = h.route_url('pullrequest_show',
1159 pr_url = h.route_url('pullrequest_show',
1159 repo_name=pr_target_repo.repo_name,
1160 repo_name=pr_target_repo.repo_name,
1160 pull_request_id=pull_request_obj.pull_request_id,)
1161 pull_request_id=pull_request_obj.pull_request_id,)
1161
1162
1162 # set some variables for email notification
1163 # set some variables for email notification
1163 pr_target_repo_url = h.route_url(
1164 pr_target_repo_url = h.route_url(
1164 'repo_summary', repo_name=pr_target_repo.repo_name)
1165 'repo_summary', repo_name=pr_target_repo.repo_name)
1165
1166
1166 pr_source_repo_url = h.route_url(
1167 pr_source_repo_url = h.route_url(
1167 'repo_summary', repo_name=pr_source_repo.repo_name)
1168 'repo_summary', repo_name=pr_source_repo.repo_name)
1168
1169
1169 # pull request specifics
1170 # pull request specifics
1170 pull_request_commits = [
1171 pull_request_commits = [
1171 (x.raw_id, x.message)
1172 (x.raw_id, x.message)
1172 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1173 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1173
1174
1174 kwargs = {
1175 kwargs = {
1175 'user': pull_request.author,
1176 'user': pull_request.author,
1176 'pull_request': pull_request_obj,
1177 'pull_request': pull_request_obj,
1177 'pull_request_commits': pull_request_commits,
1178 'pull_request_commits': pull_request_commits,
1178
1179
1179 'pull_request_target_repo': pr_target_repo,
1180 'pull_request_target_repo': pr_target_repo,
1180 'pull_request_target_repo_url': pr_target_repo_url,
1181 'pull_request_target_repo_url': pr_target_repo_url,
1181
1182
1182 'pull_request_source_repo': pr_source_repo,
1183 'pull_request_source_repo': pr_source_repo,
1183 'pull_request_source_repo_url': pr_source_repo_url,
1184 'pull_request_source_repo_url': pr_source_repo_url,
1184
1185
1185 'pull_request_url': pr_url,
1186 'pull_request_url': pr_url,
1186 }
1187 }
1187
1188
1188 # pre-generate the subject for notification itself
1189 # pre-generate the subject for notification itself
1189 (subject,
1190 (subject,
1190 _h, _e, # we don't care about those
1191 _h, _e, # we don't care about those
1191 body_plaintext) = EmailNotificationModel().render_email(
1192 body_plaintext) = EmailNotificationModel().render_email(
1192 notification_type, **kwargs)
1193 notification_type, **kwargs)
1193
1194
1194 # create notification objects, and emails
1195 # create notification objects, and emails
1195 NotificationModel().create(
1196 NotificationModel().create(
1196 created_by=pull_request.author,
1197 created_by=pull_request.author,
1197 notification_subject=subject,
1198 notification_subject=subject,
1198 notification_body=body_plaintext,
1199 notification_body=body_plaintext,
1199 notification_type=notification_type,
1200 notification_type=notification_type,
1200 recipients=recipients,
1201 recipients=recipients,
1201 email_kwargs=kwargs,
1202 email_kwargs=kwargs,
1202 )
1203 )
1203
1204
1204 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1205 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1205 commit_changes, file_changes):
1206 commit_changes, file_changes):
1206
1207
1207 updating_user_id = updating_user.user_id
1208 updating_user_id = updating_user.user_id
1208 reviewers = set([x.user.user_id for x in pull_request.reviewers])
1209 reviewers = set([x.user.user_id for x in pull_request.reviewers])
1209 # NOTE(marcink): send notification to all other users except to
1210 # NOTE(marcink): send notification to all other users except to
1210 # person who updated the PR
1211 # person who updated the PR
1211 recipients = reviewers.difference(set([updating_user_id]))
1212 recipients = reviewers.difference(set([updating_user_id]))
1212
1213
1213 log.debug('Notify following recipients about pull-request update %s', recipients)
1214 log.debug('Notify following recipients about pull-request update %s', recipients)
1214
1215
1215 pull_request_obj = pull_request
1216 pull_request_obj = pull_request
1216
1217
1217 # send email about the update
1218 # send email about the update
1218 changed_files = (
1219 changed_files = (
1219 file_changes.added + file_changes.modified + file_changes.removed)
1220 file_changes.added + file_changes.modified + file_changes.removed)
1220
1221
1221 pr_source_repo = pull_request_obj.source_repo
1222 pr_source_repo = pull_request_obj.source_repo
1222 pr_target_repo = pull_request_obj.target_repo
1223 pr_target_repo = pull_request_obj.target_repo
1223
1224
1224 pr_url = h.route_url('pullrequest_show',
1225 pr_url = h.route_url('pullrequest_show',
1225 repo_name=pr_target_repo.repo_name,
1226 repo_name=pr_target_repo.repo_name,
1226 pull_request_id=pull_request_obj.pull_request_id,)
1227 pull_request_id=pull_request_obj.pull_request_id,)
1227
1228
1228 # set some variables for email notification
1229 # set some variables for email notification
1229 pr_target_repo_url = h.route_url(
1230 pr_target_repo_url = h.route_url(
1230 'repo_summary', repo_name=pr_target_repo.repo_name)
1231 'repo_summary', repo_name=pr_target_repo.repo_name)
1231
1232
1232 pr_source_repo_url = h.route_url(
1233 pr_source_repo_url = h.route_url(
1233 'repo_summary', repo_name=pr_source_repo.repo_name)
1234 'repo_summary', repo_name=pr_source_repo.repo_name)
1234
1235
1235 email_kwargs = {
1236 email_kwargs = {
1236 'date': datetime.datetime.now(),
1237 'date': datetime.datetime.now(),
1237 'updating_user': updating_user,
1238 'updating_user': updating_user,
1238
1239
1239 'pull_request': pull_request_obj,
1240 'pull_request': pull_request_obj,
1240
1241
1241 'pull_request_target_repo': pr_target_repo,
1242 'pull_request_target_repo': pr_target_repo,
1242 'pull_request_target_repo_url': pr_target_repo_url,
1243 'pull_request_target_repo_url': pr_target_repo_url,
1243
1244
1244 'pull_request_source_repo': pr_source_repo,
1245 'pull_request_source_repo': pr_source_repo,
1245 'pull_request_source_repo_url': pr_source_repo_url,
1246 'pull_request_source_repo_url': pr_source_repo_url,
1246
1247
1247 'pull_request_url': pr_url,
1248 'pull_request_url': pr_url,
1248
1249
1249 'ancestor_commit_id': ancestor_commit_id,
1250 'ancestor_commit_id': ancestor_commit_id,
1250 'added_commits': commit_changes.added,
1251 'added_commits': commit_changes.added,
1251 'removed_commits': commit_changes.removed,
1252 'removed_commits': commit_changes.removed,
1252 'changed_files': changed_files,
1253 'changed_files': changed_files,
1253 'added_files': file_changes.added,
1254 'added_files': file_changes.added,
1254 'modified_files': file_changes.modified,
1255 'modified_files': file_changes.modified,
1255 'removed_files': file_changes.removed,
1256 'removed_files': file_changes.removed,
1256 }
1257 }
1257
1258
1258 (subject,
1259 (subject,
1259 _h, _e, # we don't care about those
1260 _h, _e, # we don't care about those
1260 body_plaintext) = EmailNotificationModel().render_email(
1261 body_plaintext) = EmailNotificationModel().render_email(
1261 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1262 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1262
1263
1263 # create notification objects, and emails
1264 # create notification objects, and emails
1264 NotificationModel().create(
1265 NotificationModel().create(
1265 created_by=updating_user,
1266 created_by=updating_user,
1266 notification_subject=subject,
1267 notification_subject=subject,
1267 notification_body=body_plaintext,
1268 notification_body=body_plaintext,
1268 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1269 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1269 recipients=recipients,
1270 recipients=recipients,
1270 email_kwargs=email_kwargs,
1271 email_kwargs=email_kwargs,
1271 )
1272 )
1272
1273
1273 def delete(self, pull_request, user):
1274 def delete(self, pull_request, user):
1274 pull_request = self.__get_pull_request(pull_request)
1275 pull_request = self.__get_pull_request(pull_request)
1275 old_data = pull_request.get_api_data(with_merge_state=False)
1276 old_data = pull_request.get_api_data(with_merge_state=False)
1276 self._cleanup_merge_workspace(pull_request)
1277 self._cleanup_merge_workspace(pull_request)
1277 self._log_audit_action(
1278 self._log_audit_action(
1278 'repo.pull_request.delete', {'old_data': old_data},
1279 'repo.pull_request.delete', {'old_data': old_data},
1279 user, pull_request)
1280 user, pull_request)
1280 Session().delete(pull_request)
1281 Session().delete(pull_request)
1281
1282
1282 def close_pull_request(self, pull_request, user):
1283 def close_pull_request(self, pull_request, user):
1283 pull_request = self.__get_pull_request(pull_request)
1284 pull_request = self.__get_pull_request(pull_request)
1284 self._cleanup_merge_workspace(pull_request)
1285 self._cleanup_merge_workspace(pull_request)
1285 pull_request.status = PullRequest.STATUS_CLOSED
1286 pull_request.status = PullRequest.STATUS_CLOSED
1286 pull_request.updated_on = datetime.datetime.now()
1287 pull_request.updated_on = datetime.datetime.now()
1287 Session().add(pull_request)
1288 Session().add(pull_request)
1288 self.trigger_pull_request_hook(
1289 self.trigger_pull_request_hook(
1289 pull_request, pull_request.author, 'close')
1290 pull_request, pull_request.author, 'close')
1290
1291
1291 pr_data = pull_request.get_api_data(with_merge_state=False)
1292 pr_data = pull_request.get_api_data(with_merge_state=False)
1292 self._log_audit_action(
1293 self._log_audit_action(
1293 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1294 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1294
1295
1295 def close_pull_request_with_comment(
1296 def close_pull_request_with_comment(
1296 self, pull_request, user, repo, message=None, auth_user=None):
1297 self, pull_request, user, repo, message=None, auth_user=None):
1297
1298
1298 pull_request_review_status = pull_request.calculated_review_status()
1299 pull_request_review_status = pull_request.calculated_review_status()
1299
1300
1300 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1301 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1301 # approved only if we have voting consent
1302 # approved only if we have voting consent
1302 status = ChangesetStatus.STATUS_APPROVED
1303 status = ChangesetStatus.STATUS_APPROVED
1303 else:
1304 else:
1304 status = ChangesetStatus.STATUS_REJECTED
1305 status = ChangesetStatus.STATUS_REJECTED
1305 status_lbl = ChangesetStatus.get_status_lbl(status)
1306 status_lbl = ChangesetStatus.get_status_lbl(status)
1306
1307
1307 default_message = (
1308 default_message = (
1308 'Closing with status change {transition_icon} {status}.'
1309 'Closing with status change {transition_icon} {status}.'
1309 ).format(transition_icon='>', status=status_lbl)
1310 ).format(transition_icon='>', status=status_lbl)
1310 text = message or default_message
1311 text = message or default_message
1311
1312
1312 # create a comment, and link it to new status
1313 # create a comment, and link it to new status
1313 comment = CommentsModel().create(
1314 comment = CommentsModel().create(
1314 text=text,
1315 text=text,
1315 repo=repo.repo_id,
1316 repo=repo.repo_id,
1316 user=user.user_id,
1317 user=user.user_id,
1317 pull_request=pull_request.pull_request_id,
1318 pull_request=pull_request.pull_request_id,
1318 status_change=status_lbl,
1319 status_change=status_lbl,
1319 status_change_type=status,
1320 status_change_type=status,
1320 closing_pr=True,
1321 closing_pr=True,
1321 auth_user=auth_user,
1322 auth_user=auth_user,
1322 )
1323 )
1323
1324
1324 # calculate old status before we change it
1325 # calculate old status before we change it
1325 old_calculated_status = pull_request.calculated_review_status()
1326 old_calculated_status = pull_request.calculated_review_status()
1326 ChangesetStatusModel().set_status(
1327 ChangesetStatusModel().set_status(
1327 repo.repo_id,
1328 repo.repo_id,
1328 status,
1329 status,
1329 user.user_id,
1330 user.user_id,
1330 comment=comment,
1331 comment=comment,
1331 pull_request=pull_request.pull_request_id
1332 pull_request=pull_request.pull_request_id
1332 )
1333 )
1333
1334
1334 Session().flush()
1335 Session().flush()
1335 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1336 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1336 # we now calculate the status of pull request again, and based on that
1337 # we now calculate the status of pull request again, and based on that
1337 # calculation trigger status change. This might happen in cases
1338 # calculation trigger status change. This might happen in cases
1338 # that non-reviewer admin closes a pr, which means his vote doesn't
1339 # that non-reviewer admin closes a pr, which means his vote doesn't
1339 # change the status, while if he's a reviewer this might change it.
1340 # change the status, while if he's a reviewer this might change it.
1340 calculated_status = pull_request.calculated_review_status()
1341 calculated_status = pull_request.calculated_review_status()
1341 if old_calculated_status != calculated_status:
1342 if old_calculated_status != calculated_status:
1342 self.trigger_pull_request_hook(
1343 self.trigger_pull_request_hook(
1343 pull_request, user, 'review_status_change',
1344 pull_request, user, 'review_status_change',
1344 data={'status': calculated_status})
1345 data={'status': calculated_status})
1345
1346
1346 # finally close the PR
1347 # finally close the PR
1347 PullRequestModel().close_pull_request(
1348 PullRequestModel().close_pull_request(
1348 pull_request.pull_request_id, user)
1349 pull_request.pull_request_id, user)
1349
1350
1350 return comment, status
1351 return comment, status
1351
1352
1352 def merge_status(self, pull_request, translator=None,
1353 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1353 force_shadow_repo_refresh=False):
1354 _ = translator or get_current_request().translate
1354 _ = translator or get_current_request().translate
1355
1355
1356 if not self._is_merge_enabled(pull_request):
1356 if not self._is_merge_enabled(pull_request):
1357 return False, _('Server-side pull request merging is disabled.')
1357 return None, False, _('Server-side pull request merging is disabled.')
1358
1358 if pull_request.is_closed():
1359 if pull_request.is_closed():
1359 return False, _('This pull request is closed.')
1360 return None, False, _('This pull request is closed.')
1361
1360 merge_possible, msg = self._check_repo_requirements(
1362 merge_possible, msg = self._check_repo_requirements(
1361 target=pull_request.target_repo, source=pull_request.source_repo,
1363 target=pull_request.target_repo, source=pull_request.source_repo,
1362 translator=_)
1364 translator=_)
1363 if not merge_possible:
1365 if not merge_possible:
1364 return merge_possible, msg
1366 return None, merge_possible, msg
1365
1367
1366 try:
1368 try:
1367 resp = self._try_merge(
1369 merge_response = self._try_merge(
1368 pull_request,
1370 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1369 force_shadow_repo_refresh=force_shadow_repo_refresh)
1371 log.debug("Merge response: %s", merge_response)
1370 log.debug("Merge response: %s", resp)
1372 return merge_response, merge_response.possible, merge_response.merge_status_message
1371 status = resp.possible, resp.merge_status_message
1372 except NotImplementedError:
1373 except NotImplementedError:
1373 status = False, _('Pull request merging is not supported.')
1374 return None, False, _('Pull request merging is not supported.')
1374
1375 return status
1376
1375
1377 def _check_repo_requirements(self, target, source, translator):
1376 def _check_repo_requirements(self, target, source, translator):
1378 """
1377 """
1379 Check if `target` and `source` have compatible requirements.
1378 Check if `target` and `source` have compatible requirements.
1380
1379
1381 Currently this is just checking for largefiles.
1380 Currently this is just checking for largefiles.
1382 """
1381 """
1383 _ = translator
1382 _ = translator
1384 target_has_largefiles = self._has_largefiles(target)
1383 target_has_largefiles = self._has_largefiles(target)
1385 source_has_largefiles = self._has_largefiles(source)
1384 source_has_largefiles = self._has_largefiles(source)
1386 merge_possible = True
1385 merge_possible = True
1387 message = u''
1386 message = u''
1388
1387
1389 if target_has_largefiles != source_has_largefiles:
1388 if target_has_largefiles != source_has_largefiles:
1390 merge_possible = False
1389 merge_possible = False
1391 if source_has_largefiles:
1390 if source_has_largefiles:
1392 message = _(
1391 message = _(
1393 'Target repository large files support is disabled.')
1392 'Target repository large files support is disabled.')
1394 else:
1393 else:
1395 message = _(
1394 message = _(
1396 'Source repository large files support is disabled.')
1395 'Source repository large files support is disabled.')
1397
1396
1398 return merge_possible, message
1397 return merge_possible, message
1399
1398
1400 def _has_largefiles(self, repo):
1399 def _has_largefiles(self, repo):
1401 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1400 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1402 'extensions', 'largefiles')
1401 'extensions', 'largefiles')
1403 return largefiles_ui and largefiles_ui[0].active
1402 return largefiles_ui and largefiles_ui[0].active
1404
1403
1405 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1404 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1406 """
1405 """
1407 Try to merge the pull request and return the merge status.
1406 Try to merge the pull request and return the merge status.
1408 """
1407 """
1409 log.debug(
1408 log.debug(
1410 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1409 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1411 pull_request.pull_request_id, force_shadow_repo_refresh)
1410 pull_request.pull_request_id, force_shadow_repo_refresh)
1412 target_vcs = pull_request.target_repo.scm_instance()
1411 target_vcs = pull_request.target_repo.scm_instance()
1413 # Refresh the target reference.
1412 # Refresh the target reference.
1414 try:
1413 try:
1415 target_ref = self._refresh_reference(
1414 target_ref = self._refresh_reference(
1416 pull_request.target_ref_parts, target_vcs)
1415 pull_request.target_ref_parts, target_vcs)
1417 except CommitDoesNotExistError:
1416 except CommitDoesNotExistError:
1418 merge_state = MergeResponse(
1417 merge_state = MergeResponse(
1419 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1418 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1420 metadata={'target_ref': pull_request.target_ref_parts})
1419 metadata={'target_ref': pull_request.target_ref_parts})
1421 return merge_state
1420 return merge_state
1422
1421
1423 target_locked = pull_request.target_repo.locked
1422 target_locked = pull_request.target_repo.locked
1424 if target_locked and target_locked[0]:
1423 if target_locked and target_locked[0]:
1425 locked_by = 'user:{}'.format(target_locked[0])
1424 locked_by = 'user:{}'.format(target_locked[0])
1426 log.debug("The target repository is locked by %s.", locked_by)
1425 log.debug("The target repository is locked by %s.", locked_by)
1427 merge_state = MergeResponse(
1426 merge_state = MergeResponse(
1428 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1427 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1429 metadata={'locked_by': locked_by})
1428 metadata={'locked_by': locked_by})
1430 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1429 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1431 pull_request, target_ref):
1430 pull_request, target_ref):
1432 log.debug("Refreshing the merge status of the repository.")
1431 log.debug("Refreshing the merge status of the repository.")
1433 merge_state = self._refresh_merge_state(
1432 merge_state = self._refresh_merge_state(
1434 pull_request, target_vcs, target_ref)
1433 pull_request, target_vcs, target_ref)
1435 else:
1434 else:
1436 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1435 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1437 metadata = {
1436 metadata = {
1438 'unresolved_files': '',
1437 'unresolved_files': '',
1439 'target_ref': pull_request.target_ref_parts,
1438 'target_ref': pull_request.target_ref_parts,
1440 'source_ref': pull_request.source_ref_parts,
1439 'source_ref': pull_request.source_ref_parts,
1441 }
1440 }
1441 if pull_request.last_merge_metadata:
1442 metadata.update(pull_request.last_merge_metadata)
1443
1442 if not possible and target_ref.type == 'branch':
1444 if not possible and target_ref.type == 'branch':
1443 # NOTE(marcink): case for mercurial multiple heads on branch
1445 # NOTE(marcink): case for mercurial multiple heads on branch
1444 heads = target_vcs._heads(target_ref.name)
1446 heads = target_vcs._heads(target_ref.name)
1445 if len(heads) != 1:
1447 if len(heads) != 1:
1446 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1448 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1447 metadata.update({
1449 metadata.update({
1448 'heads': heads
1450 'heads': heads
1449 })
1451 })
1452
1450 merge_state = MergeResponse(
1453 merge_state = MergeResponse(
1451 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1454 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1452
1455
1453 return merge_state
1456 return merge_state
1454
1457
1455 def _refresh_reference(self, reference, vcs_repository):
1458 def _refresh_reference(self, reference, vcs_repository):
1456 if reference.type in self.UPDATABLE_REF_TYPES:
1459 if reference.type in self.UPDATABLE_REF_TYPES:
1457 name_or_id = reference.name
1460 name_or_id = reference.name
1458 else:
1461 else:
1459 name_or_id = reference.commit_id
1462 name_or_id = reference.commit_id
1460
1463
1461 refreshed_commit = vcs_repository.get_commit(name_or_id)
1464 refreshed_commit = vcs_repository.get_commit(name_or_id)
1462 refreshed_reference = Reference(
1465 refreshed_reference = Reference(
1463 reference.type, reference.name, refreshed_commit.raw_id)
1466 reference.type, reference.name, refreshed_commit.raw_id)
1464 return refreshed_reference
1467 return refreshed_reference
1465
1468
1466 def _needs_merge_state_refresh(self, pull_request, target_reference):
1469 def _needs_merge_state_refresh(self, pull_request, target_reference):
1467 return not(
1470 return not(
1468 pull_request.revisions and
1471 pull_request.revisions and
1469 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1472 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1470 target_reference.commit_id == pull_request._last_merge_target_rev)
1473 target_reference.commit_id == pull_request._last_merge_target_rev)
1471
1474
1472 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1475 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1473 workspace_id = self._workspace_id(pull_request)
1476 workspace_id = self._workspace_id(pull_request)
1474 source_vcs = pull_request.source_repo.scm_instance()
1477 source_vcs = pull_request.source_repo.scm_instance()
1475 repo_id = pull_request.target_repo.repo_id
1478 repo_id = pull_request.target_repo.repo_id
1476 use_rebase = self._use_rebase_for_merging(pull_request)
1479 use_rebase = self._use_rebase_for_merging(pull_request)
1477 close_branch = self._close_branch_before_merging(pull_request)
1480 close_branch = self._close_branch_before_merging(pull_request)
1478 merge_state = target_vcs.merge(
1481 merge_state = target_vcs.merge(
1479 repo_id, workspace_id,
1482 repo_id, workspace_id,
1480 target_reference, source_vcs, pull_request.source_ref_parts,
1483 target_reference, source_vcs, pull_request.source_ref_parts,
1481 dry_run=True, use_rebase=use_rebase,
1484 dry_run=True, use_rebase=use_rebase,
1482 close_branch=close_branch)
1485 close_branch=close_branch)
1483
1486
1484 # Do not store the response if there was an unknown error.
1487 # Do not store the response if there was an unknown error.
1485 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1488 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1486 pull_request._last_merge_source_rev = \
1489 pull_request._last_merge_source_rev = \
1487 pull_request.source_ref_parts.commit_id
1490 pull_request.source_ref_parts.commit_id
1488 pull_request._last_merge_target_rev = target_reference.commit_id
1491 pull_request._last_merge_target_rev = target_reference.commit_id
1489 pull_request.last_merge_status = merge_state.failure_reason
1492 pull_request.last_merge_status = merge_state.failure_reason
1493 pull_request.last_merge_metadata = merge_state.metadata
1494
1490 pull_request.shadow_merge_ref = merge_state.merge_ref
1495 pull_request.shadow_merge_ref = merge_state.merge_ref
1491 Session().add(pull_request)
1496 Session().add(pull_request)
1492 Session().commit()
1497 Session().commit()
1493
1498
1494 return merge_state
1499 return merge_state
1495
1500
1496 def _workspace_id(self, pull_request):
1501 def _workspace_id(self, pull_request):
1497 workspace_id = 'pr-%s' % pull_request.pull_request_id
1502 workspace_id = 'pr-%s' % pull_request.pull_request_id
1498 return workspace_id
1503 return workspace_id
1499
1504
1500 def generate_repo_data(self, repo, commit_id=None, branch=None,
1505 def generate_repo_data(self, repo, commit_id=None, branch=None,
1501 bookmark=None, translator=None):
1506 bookmark=None, translator=None):
1502 from rhodecode.model.repo import RepoModel
1507 from rhodecode.model.repo import RepoModel
1503
1508
1504 all_refs, selected_ref = \
1509 all_refs, selected_ref = \
1505 self._get_repo_pullrequest_sources(
1510 self._get_repo_pullrequest_sources(
1506 repo.scm_instance(), commit_id=commit_id,
1511 repo.scm_instance(), commit_id=commit_id,
1507 branch=branch, bookmark=bookmark, translator=translator)
1512 branch=branch, bookmark=bookmark, translator=translator)
1508
1513
1509 refs_select2 = []
1514 refs_select2 = []
1510 for element in all_refs:
1515 for element in all_refs:
1511 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1516 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1512 refs_select2.append({'text': element[1], 'children': children})
1517 refs_select2.append({'text': element[1], 'children': children})
1513
1518
1514 return {
1519 return {
1515 'user': {
1520 'user': {
1516 'user_id': repo.user.user_id,
1521 'user_id': repo.user.user_id,
1517 'username': repo.user.username,
1522 'username': repo.user.username,
1518 'firstname': repo.user.first_name,
1523 'firstname': repo.user.first_name,
1519 'lastname': repo.user.last_name,
1524 'lastname': repo.user.last_name,
1520 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1525 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1521 },
1526 },
1522 'name': repo.repo_name,
1527 'name': repo.repo_name,
1523 'link': RepoModel().get_url(repo),
1528 'link': RepoModel().get_url(repo),
1524 'description': h.chop_at_smart(repo.description_safe, '\n'),
1529 'description': h.chop_at_smart(repo.description_safe, '\n'),
1525 'refs': {
1530 'refs': {
1526 'all_refs': all_refs,
1531 'all_refs': all_refs,
1527 'selected_ref': selected_ref,
1532 'selected_ref': selected_ref,
1528 'select2_refs': refs_select2
1533 'select2_refs': refs_select2
1529 }
1534 }
1530 }
1535 }
1531
1536
1532 def generate_pullrequest_title(self, source, source_ref, target):
1537 def generate_pullrequest_title(self, source, source_ref, target):
1533 return u'{source}#{at_ref} to {target}'.format(
1538 return u'{source}#{at_ref} to {target}'.format(
1534 source=source,
1539 source=source,
1535 at_ref=source_ref,
1540 at_ref=source_ref,
1536 target=target,
1541 target=target,
1537 )
1542 )
1538
1543
1539 def _cleanup_merge_workspace(self, pull_request):
1544 def _cleanup_merge_workspace(self, pull_request):
1540 # Merging related cleanup
1545 # Merging related cleanup
1541 repo_id = pull_request.target_repo.repo_id
1546 repo_id = pull_request.target_repo.repo_id
1542 target_scm = pull_request.target_repo.scm_instance()
1547 target_scm = pull_request.target_repo.scm_instance()
1543 workspace_id = self._workspace_id(pull_request)
1548 workspace_id = self._workspace_id(pull_request)
1544
1549
1545 try:
1550 try:
1546 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1551 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1547 except NotImplementedError:
1552 except NotImplementedError:
1548 pass
1553 pass
1549
1554
1550 def _get_repo_pullrequest_sources(
1555 def _get_repo_pullrequest_sources(
1551 self, repo, commit_id=None, branch=None, bookmark=None,
1556 self, repo, commit_id=None, branch=None, bookmark=None,
1552 translator=None):
1557 translator=None):
1553 """
1558 """
1554 Return a structure with repo's interesting commits, suitable for
1559 Return a structure with repo's interesting commits, suitable for
1555 the selectors in pullrequest controller
1560 the selectors in pullrequest controller
1556
1561
1557 :param commit_id: a commit that must be in the list somehow
1562 :param commit_id: a commit that must be in the list somehow
1558 and selected by default
1563 and selected by default
1559 :param branch: a branch that must be in the list and selected
1564 :param branch: a branch that must be in the list and selected
1560 by default - even if closed
1565 by default - even if closed
1561 :param bookmark: a bookmark that must be in the list and selected
1566 :param bookmark: a bookmark that must be in the list and selected
1562 """
1567 """
1563 _ = translator or get_current_request().translate
1568 _ = translator or get_current_request().translate
1564
1569
1565 commit_id = safe_str(commit_id) if commit_id else None
1570 commit_id = safe_str(commit_id) if commit_id else None
1566 branch = safe_unicode(branch) if branch else None
1571 branch = safe_unicode(branch) if branch else None
1567 bookmark = safe_unicode(bookmark) if bookmark else None
1572 bookmark = safe_unicode(bookmark) if bookmark else None
1568
1573
1569 selected = None
1574 selected = None
1570
1575
1571 # order matters: first source that has commit_id in it will be selected
1576 # order matters: first source that has commit_id in it will be selected
1572 sources = []
1577 sources = []
1573 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1578 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1574 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1579 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1575
1580
1576 if commit_id:
1581 if commit_id:
1577 ref_commit = (h.short_id(commit_id), commit_id)
1582 ref_commit = (h.short_id(commit_id), commit_id)
1578 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1583 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1579
1584
1580 sources.append(
1585 sources.append(
1581 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1586 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1582 )
1587 )
1583
1588
1584 groups = []
1589 groups = []
1585
1590
1586 for group_key, ref_list, group_name, match in sources:
1591 for group_key, ref_list, group_name, match in sources:
1587 group_refs = []
1592 group_refs = []
1588 for ref_name, ref_id in ref_list:
1593 for ref_name, ref_id in ref_list:
1589 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1594 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1590 group_refs.append((ref_key, ref_name))
1595 group_refs.append((ref_key, ref_name))
1591
1596
1592 if not selected:
1597 if not selected:
1593 if set([commit_id, match]) & set([ref_id, ref_name]):
1598 if set([commit_id, match]) & set([ref_id, ref_name]):
1594 selected = ref_key
1599 selected = ref_key
1595
1600
1596 if group_refs:
1601 if group_refs:
1597 groups.append((group_refs, group_name))
1602 groups.append((group_refs, group_name))
1598
1603
1599 if not selected:
1604 if not selected:
1600 ref = commit_id or branch or bookmark
1605 ref = commit_id or branch or bookmark
1601 if ref:
1606 if ref:
1602 raise CommitDoesNotExistError(
1607 raise CommitDoesNotExistError(
1603 u'No commit refs could be found matching: {}'.format(ref))
1608 u'No commit refs could be found matching: {}'.format(ref))
1604 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1609 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1605 selected = u'branch:{}:{}'.format(
1610 selected = u'branch:{}:{}'.format(
1606 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1611 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1607 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1612 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1608 )
1613 )
1609 elif repo.commit_ids:
1614 elif repo.commit_ids:
1610 # make the user select in this case
1615 # make the user select in this case
1611 selected = None
1616 selected = None
1612 else:
1617 else:
1613 raise EmptyRepositoryError()
1618 raise EmptyRepositoryError()
1614 return groups, selected
1619 return groups, selected
1615
1620
1616 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1621 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1617 hide_whitespace_changes, diff_context):
1622 hide_whitespace_changes, diff_context):
1618
1623
1619 return self._get_diff_from_pr_or_version(
1624 return self._get_diff_from_pr_or_version(
1620 source_repo, source_ref_id, target_ref_id,
1625 source_repo, source_ref_id, target_ref_id,
1621 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1626 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1622
1627
1623 def _get_diff_from_pr_or_version(
1628 def _get_diff_from_pr_or_version(
1624 self, source_repo, source_ref_id, target_ref_id,
1629 self, source_repo, source_ref_id, target_ref_id,
1625 hide_whitespace_changes, diff_context):
1630 hide_whitespace_changes, diff_context):
1626
1631
1627 target_commit = source_repo.get_commit(
1632 target_commit = source_repo.get_commit(
1628 commit_id=safe_str(target_ref_id))
1633 commit_id=safe_str(target_ref_id))
1629 source_commit = source_repo.get_commit(
1634 source_commit = source_repo.get_commit(
1630 commit_id=safe_str(source_ref_id))
1635 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1631 if isinstance(source_repo, Repository):
1636 if isinstance(source_repo, Repository):
1632 vcs_repo = source_repo.scm_instance()
1637 vcs_repo = source_repo.scm_instance()
1633 else:
1638 else:
1634 vcs_repo = source_repo
1639 vcs_repo = source_repo
1635
1640
1636 # TODO: johbo: In the context of an update, we cannot reach
1641 # TODO: johbo: In the context of an update, we cannot reach
1637 # the old commit anymore with our normal mechanisms. It needs
1642 # the old commit anymore with our normal mechanisms. It needs
1638 # some sort of special support in the vcs layer to avoid this
1643 # some sort of special support in the vcs layer to avoid this
1639 # workaround.
1644 # workaround.
1640 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1645 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1641 vcs_repo.alias == 'git'):
1646 vcs_repo.alias == 'git'):
1642 source_commit.raw_id = safe_str(source_ref_id)
1647 source_commit.raw_id = safe_str(source_ref_id)
1643
1648
1644 log.debug('calculating diff between '
1649 log.debug('calculating diff between '
1645 'source_ref:%s and target_ref:%s for repo `%s`',
1650 'source_ref:%s and target_ref:%s for repo `%s`',
1646 target_ref_id, source_ref_id,
1651 target_ref_id, source_ref_id,
1647 safe_unicode(vcs_repo.path))
1652 safe_unicode(vcs_repo.path))
1648
1653
1649 vcs_diff = vcs_repo.get_diff(
1654 vcs_diff = vcs_repo.get_diff(
1650 commit1=target_commit, commit2=source_commit,
1655 commit1=target_commit, commit2=source_commit,
1651 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1656 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1652 return vcs_diff
1657 return vcs_diff
1653
1658
1654 def _is_merge_enabled(self, pull_request):
1659 def _is_merge_enabled(self, pull_request):
1655 return self._get_general_setting(
1660 return self._get_general_setting(
1656 pull_request, 'rhodecode_pr_merge_enabled')
1661 pull_request, 'rhodecode_pr_merge_enabled')
1657
1662
1658 def _use_rebase_for_merging(self, pull_request):
1663 def _use_rebase_for_merging(self, pull_request):
1659 repo_type = pull_request.target_repo.repo_type
1664 repo_type = pull_request.target_repo.repo_type
1660 if repo_type == 'hg':
1665 if repo_type == 'hg':
1661 return self._get_general_setting(
1666 return self._get_general_setting(
1662 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1667 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1663 elif repo_type == 'git':
1668 elif repo_type == 'git':
1664 return self._get_general_setting(
1669 return self._get_general_setting(
1665 pull_request, 'rhodecode_git_use_rebase_for_merging')
1670 pull_request, 'rhodecode_git_use_rebase_for_merging')
1666
1671
1667 return False
1672 return False
1668
1673
1669 def _user_name_for_merging(self, pull_request, user):
1674 def _user_name_for_merging(self, pull_request, user):
1670 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1675 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1671 if env_user_name_attr and hasattr(user, env_user_name_attr):
1676 if env_user_name_attr and hasattr(user, env_user_name_attr):
1672 user_name_attr = env_user_name_attr
1677 user_name_attr = env_user_name_attr
1673 else:
1678 else:
1674 user_name_attr = 'short_contact'
1679 user_name_attr = 'short_contact'
1675
1680
1676 user_name = getattr(user, user_name_attr)
1681 user_name = getattr(user, user_name_attr)
1677 return user_name
1682 return user_name
1678
1683
1679 def _close_branch_before_merging(self, pull_request):
1684 def _close_branch_before_merging(self, pull_request):
1680 repo_type = pull_request.target_repo.repo_type
1685 repo_type = pull_request.target_repo.repo_type
1681 if repo_type == 'hg':
1686 if repo_type == 'hg':
1682 return self._get_general_setting(
1687 return self._get_general_setting(
1683 pull_request, 'rhodecode_hg_close_branch_before_merging')
1688 pull_request, 'rhodecode_hg_close_branch_before_merging')
1684 elif repo_type == 'git':
1689 elif repo_type == 'git':
1685 return self._get_general_setting(
1690 return self._get_general_setting(
1686 pull_request, 'rhodecode_git_close_branch_before_merging')
1691 pull_request, 'rhodecode_git_close_branch_before_merging')
1687
1692
1688 return False
1693 return False
1689
1694
1690 def _get_general_setting(self, pull_request, settings_key, default=False):
1695 def _get_general_setting(self, pull_request, settings_key, default=False):
1691 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1696 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1692 settings = settings_model.get_general_settings()
1697 settings = settings_model.get_general_settings()
1693 return settings.get(settings_key, default)
1698 return settings.get(settings_key, default)
1694
1699
1695 def _log_audit_action(self, action, action_data, user, pull_request):
1700 def _log_audit_action(self, action, action_data, user, pull_request):
1696 audit_logger.store(
1701 audit_logger.store(
1697 action=action,
1702 action=action,
1698 action_data=action_data,
1703 action_data=action_data,
1699 user=user,
1704 user=user,
1700 repo=pull_request.target_repo)
1705 repo=pull_request.target_repo)
1701
1706
1702 def get_reviewer_functions(self):
1707 def get_reviewer_functions(self):
1703 """
1708 """
1704 Fetches functions for validation and fetching default reviewers.
1709 Fetches functions for validation and fetching default reviewers.
1705 If available we use the EE package, else we fallback to CE
1710 If available we use the EE package, else we fallback to CE
1706 package functions
1711 package functions
1707 """
1712 """
1708 try:
1713 try:
1709 from rc_reviewers.utils import get_default_reviewers_data
1714 from rc_reviewers.utils import get_default_reviewers_data
1710 from rc_reviewers.utils import validate_default_reviewers
1715 from rc_reviewers.utils import validate_default_reviewers
1711 except ImportError:
1716 except ImportError:
1712 from rhodecode.apps.repository.utils import get_default_reviewers_data
1717 from rhodecode.apps.repository.utils import get_default_reviewers_data
1713 from rhodecode.apps.repository.utils import validate_default_reviewers
1718 from rhodecode.apps.repository.utils import validate_default_reviewers
1714
1719
1715 return get_default_reviewers_data, validate_default_reviewers
1720 return get_default_reviewers_data, validate_default_reviewers
1716
1721
1717
1722
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions.

    The individual checks are keyed by the ``*_CHECK`` class constants inside
    :attr:`error_details`, so UI code can render per-check feedback.
    """
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        # calculated review status of the pull request, set by validate()
        self.review_status = None
        # whether the simulated merge succeeded (bool once validate() ran)
        self.merge_possible = None
        # human readable message describing the merge simulation outcome
        self.merge_msg = ''
        # raw MergeResponse from the vcs backend (carries failure metadata)
        self.merge_response = None
        # True as soon as any check pushed an error
        self.failed = None
        # list of [error_type, message] pairs, in check order
        self.errors = []
        # ordered mapping of check-key -> error info dict
        self.error_details = OrderedDict()

    def __repr__(self):
        return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
            self.merge_possible, self.failed, self.errors)

    def push_error(self, error_type, message, error_key, details):
        """
        Record a failed check.

        :param error_type: severity, e.g. 'error' or 'warning'
        :param message: translated, user facing message
        :param error_key: one of the ``*_CHECK`` class constants
        :param details: arbitrary payload with check specific context
        """
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks for `pull_request` and return a MergeCheck object.

        :param pull_request: the PullRequest to validate
        :param auth_user: user requesting the merge, used for permission checks
        :param translator: gettext-style translation callable
        :param fail_early: when True, return right after the first failed check
        :param force_shadow_repo_refresh: force a refresh of the shadow repo
            during the merge simulation
        """
        _ = translator
        merge_check = cls()

        # title has WIP: marker
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents from accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        # keep the full backend response so callers can inspect failure metadata
        merge_check.merge_response = merge_response

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Describe how the merge will be performed (strategy, branch closing).

        :return: dict of condition-name -> dict(details=..., message=...)
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
# Summary of commit-level changes between two pull request versions.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

# Summary of file-level changes between two pull request versions.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,981 +1,981 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 MergeResponse, MergeFailureReason, Reference)
29 MergeResponse, MergeFailureReason, Reference)
30 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.model.comment import CommentsModel
32 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.db import PullRequest, Session
34 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.pull_request import PullRequestModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37
37
38
38
39 pytestmark = [
39 pytestmark = [
40 pytest.mark.backends("git", "hg"),
40 pytest.mark.backends("git", "hg"),
41 ]
41 ]
42
42
43
43
44 @pytest.mark.usefixtures('config_stub')
44 @pytest.mark.usefixtures('config_stub')
45 class TestPullRequestModel(object):
45 class TestPullRequestModel(object):
46
46
47 @pytest.fixture()
47 @pytest.fixture()
48 def pull_request(self, request, backend, pr_util):
48 def pull_request(self, request, backend, pr_util):
49 """
49 """
50 A pull request combined with multiples patches.
50 A pull request combined with multiples patches.
51 """
51 """
52 BackendClass = get_backend(backend.alias)
52 BackendClass = get_backend(backend.alias)
53 merge_resp = MergeResponse(
53 merge_resp = MergeResponse(
54 False, False, None, MergeFailureReason.UNKNOWN,
54 False, False, None, MergeFailureReason.UNKNOWN,
55 metadata={'exception': 'MockError'})
55 metadata={'exception': 'MockError'})
56 self.merge_patcher = mock.patch.object(
56 self.merge_patcher = mock.patch.object(
57 BackendClass, 'merge', return_value=merge_resp)
57 BackendClass, 'merge', return_value=merge_resp)
58 self.workspace_remove_patcher = mock.patch.object(
58 self.workspace_remove_patcher = mock.patch.object(
59 BackendClass, 'cleanup_merge_workspace')
59 BackendClass, 'cleanup_merge_workspace')
60
60
61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
62 self.merge_mock = self.merge_patcher.start()
62 self.merge_mock = self.merge_patcher.start()
63 self.comment_patcher = mock.patch(
63 self.comment_patcher = mock.patch(
64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
65 self.comment_patcher.start()
65 self.comment_patcher.start()
66 self.notification_patcher = mock.patch(
66 self.notification_patcher = mock.patch(
67 'rhodecode.model.notification.NotificationModel.create')
67 'rhodecode.model.notification.NotificationModel.create')
68 self.notification_patcher.start()
68 self.notification_patcher.start()
69 self.helper_patcher = mock.patch(
69 self.helper_patcher = mock.patch(
70 'rhodecode.lib.helpers.route_path')
70 'rhodecode.lib.helpers.route_path')
71 self.helper_patcher.start()
71 self.helper_patcher.start()
72
72
73 self.hook_patcher = mock.patch.object(PullRequestModel,
73 self.hook_patcher = mock.patch.object(PullRequestModel,
74 'trigger_pull_request_hook')
74 'trigger_pull_request_hook')
75 self.hook_mock = self.hook_patcher.start()
75 self.hook_mock = self.hook_patcher.start()
76
76
77 self.invalidation_patcher = mock.patch(
77 self.invalidation_patcher = mock.patch(
78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
79 self.invalidation_mock = self.invalidation_patcher.start()
79 self.invalidation_mock = self.invalidation_patcher.start()
80
80
81 self.pull_request = pr_util.create_pull_request(
81 self.pull_request = pr_util.create_pull_request(
82 mergeable=True, name_suffix=u'Δ…Δ‡')
82 mergeable=True, name_suffix=u'Δ…Δ‡')
83 self.source_commit = self.pull_request.source_ref_parts.commit_id
83 self.source_commit = self.pull_request.source_ref_parts.commit_id
84 self.target_commit = self.pull_request.target_ref_parts.commit_id
84 self.target_commit = self.pull_request.target_ref_parts.commit_id
85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
86 self.repo_id = self.pull_request.target_repo.repo_id
86 self.repo_id = self.pull_request.target_repo.repo_id
87
87
88 @request.addfinalizer
88 @request.addfinalizer
89 def cleanup_pull_request():
89 def cleanup_pull_request():
90 calls = [mock.call(
90 calls = [mock.call(
91 self.pull_request, self.pull_request.author, 'create')]
91 self.pull_request, self.pull_request.author, 'create')]
92 self.hook_mock.assert_has_calls(calls)
92 self.hook_mock.assert_has_calls(calls)
93
93
94 self.workspace_remove_patcher.stop()
94 self.workspace_remove_patcher.stop()
95 self.merge_patcher.stop()
95 self.merge_patcher.stop()
96 self.comment_patcher.stop()
96 self.comment_patcher.stop()
97 self.notification_patcher.stop()
97 self.notification_patcher.stop()
98 self.helper_patcher.stop()
98 self.helper_patcher.stop()
99 self.hook_patcher.stop()
99 self.hook_patcher.stop()
100 self.invalidation_patcher.stop()
100 self.invalidation_patcher.stop()
101
101
102 return self.pull_request
102 return self.pull_request
103
103
104 def test_get_all(self, pull_request):
104 def test_get_all(self, pull_request):
105 prs = PullRequestModel().get_all(pull_request.target_repo)
105 prs = PullRequestModel().get_all(pull_request.target_repo)
106 assert isinstance(prs, list)
106 assert isinstance(prs, list)
107 assert len(prs) == 1
107 assert len(prs) == 1
108
108
109 def test_count_all(self, pull_request):
109 def test_count_all(self, pull_request):
110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
111 assert pr_count == 1
111 assert pr_count == 1
112
112
113 def test_get_awaiting_review(self, pull_request):
113 def test_get_awaiting_review(self, pull_request):
114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
115 assert isinstance(prs, list)
115 assert isinstance(prs, list)
116 assert len(prs) == 1
116 assert len(prs) == 1
117
117
118 def test_count_awaiting_review(self, pull_request):
118 def test_count_awaiting_review(self, pull_request):
119 pr_count = PullRequestModel().count_awaiting_review(
119 pr_count = PullRequestModel().count_awaiting_review(
120 pull_request.target_repo)
120 pull_request.target_repo)
121 assert pr_count == 1
121 assert pr_count == 1
122
122
123 def test_get_awaiting_my_review(self, pull_request):
123 def test_get_awaiting_my_review(self, pull_request):
124 PullRequestModel().update_reviewers(
124 PullRequestModel().update_reviewers(
125 pull_request, [(pull_request.author, ['author'], False, [])],
125 pull_request, [(pull_request.author, ['author'], False, [])],
126 pull_request.author)
126 pull_request.author)
127 Session().commit()
127 Session().commit()
128
128
129 prs = PullRequestModel().get_awaiting_my_review(
129 prs = PullRequestModel().get_awaiting_my_review(
130 pull_request.target_repo, user_id=pull_request.author.user_id)
130 pull_request.target_repo, user_id=pull_request.author.user_id)
131 assert isinstance(prs, list)
131 assert isinstance(prs, list)
132 assert len(prs) == 1
132 assert len(prs) == 1
133
133
134 def test_count_awaiting_my_review(self, pull_request):
134 def test_count_awaiting_my_review(self, pull_request):
135 PullRequestModel().update_reviewers(
135 PullRequestModel().update_reviewers(
136 pull_request, [(pull_request.author, ['author'], False, [])],
136 pull_request, [(pull_request.author, ['author'], False, [])],
137 pull_request.author)
137 pull_request.author)
138 Session().commit()
138 Session().commit()
139
139
140 pr_count = PullRequestModel().count_awaiting_my_review(
140 pr_count = PullRequestModel().count_awaiting_my_review(
141 pull_request.target_repo, user_id=pull_request.author.user_id)
141 pull_request.target_repo, user_id=pull_request.author.user_id)
142 assert pr_count == 1
142 assert pr_count == 1
143
143
144 def test_delete_calls_cleanup_merge(self, pull_request):
144 def test_delete_calls_cleanup_merge(self, pull_request):
145 repo_id = pull_request.target_repo.repo_id
145 repo_id = pull_request.target_repo.repo_id
146 PullRequestModel().delete(pull_request, pull_request.author)
146 PullRequestModel().delete(pull_request, pull_request.author)
147 Session().commit()
147 Session().commit()
148
148
149 self.workspace_remove_mock.assert_called_once_with(
149 self.workspace_remove_mock.assert_called_once_with(
150 repo_id, self.workspace_id)
150 repo_id, self.workspace_id)
151
151
152 def test_close_calls_cleanup_and_hook(self, pull_request):
152 def test_close_calls_cleanup_and_hook(self, pull_request):
153 PullRequestModel().close_pull_request(
153 PullRequestModel().close_pull_request(
154 pull_request, pull_request.author)
154 pull_request, pull_request.author)
155 Session().commit()
155 Session().commit()
156
156
157 repo_id = pull_request.target_repo.repo_id
157 repo_id = pull_request.target_repo.repo_id
158
158
159 self.workspace_remove_mock.assert_called_once_with(
159 self.workspace_remove_mock.assert_called_once_with(
160 repo_id, self.workspace_id)
160 repo_id, self.workspace_id)
161 self.hook_mock.assert_called_with(
161 self.hook_mock.assert_called_with(
162 self.pull_request, self.pull_request.author, 'close')
162 self.pull_request, self.pull_request.author, 'close')
163
163
164 def test_merge_status(self, pull_request):
164 def test_merge_status(self, pull_request):
165 self.merge_mock.return_value = MergeResponse(
165 self.merge_mock.return_value = MergeResponse(
166 True, False, None, MergeFailureReason.NONE)
166 True, False, None, MergeFailureReason.NONE)
167
167
168 assert pull_request._last_merge_source_rev is None
168 assert pull_request._last_merge_source_rev is None
169 assert pull_request._last_merge_target_rev is None
169 assert pull_request._last_merge_target_rev is None
170 assert pull_request.last_merge_status is None
170 assert pull_request.last_merge_status is None
171
171
172 status, msg = PullRequestModel().merge_status(pull_request)
172 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
173 assert status is True
173 assert status is True
174 assert msg == 'This pull request can be automatically merged.'
174 assert msg == 'This pull request can be automatically merged.'
175 self.merge_mock.assert_called_with(
175 self.merge_mock.assert_called_with(
176 self.repo_id, self.workspace_id,
176 self.repo_id, self.workspace_id,
177 pull_request.target_ref_parts,
177 pull_request.target_ref_parts,
178 pull_request.source_repo.scm_instance(),
178 pull_request.source_repo.scm_instance(),
179 pull_request.source_ref_parts, dry_run=True,
179 pull_request.source_ref_parts, dry_run=True,
180 use_rebase=False, close_branch=False)
180 use_rebase=False, close_branch=False)
181
181
182 assert pull_request._last_merge_source_rev == self.source_commit
182 assert pull_request._last_merge_source_rev == self.source_commit
183 assert pull_request._last_merge_target_rev == self.target_commit
183 assert pull_request._last_merge_target_rev == self.target_commit
184 assert pull_request.last_merge_status is MergeFailureReason.NONE
184 assert pull_request.last_merge_status is MergeFailureReason.NONE
185
185
186 self.merge_mock.reset_mock()
186 self.merge_mock.reset_mock()
187 status, msg = PullRequestModel().merge_status(pull_request)
187 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
188 assert status is True
188 assert status is True
189 assert msg == 'This pull request can be automatically merged.'
189 assert msg == 'This pull request can be automatically merged.'
190 assert self.merge_mock.called is False
190 assert self.merge_mock.called is False
191
191
192 def test_merge_status_known_failure(self, pull_request):
192 def test_merge_status_known_failure(self, pull_request):
193 self.merge_mock.return_value = MergeResponse(
193 self.merge_mock.return_value = MergeResponse(
194 False, False, None, MergeFailureReason.MERGE_FAILED,
194 False, False, None, MergeFailureReason.MERGE_FAILED,
195 metadata={'unresolved_files': 'file1'})
195 metadata={'unresolved_files': 'file1'})
196
196
197 assert pull_request._last_merge_source_rev is None
197 assert pull_request._last_merge_source_rev is None
198 assert pull_request._last_merge_target_rev is None
198 assert pull_request._last_merge_target_rev is None
199 assert pull_request.last_merge_status is None
199 assert pull_request.last_merge_status is None
200
200
201 status, msg = PullRequestModel().merge_status(pull_request)
201 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
202 assert status is False
202 assert status is False
203 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
203 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
204 self.merge_mock.assert_called_with(
204 self.merge_mock.assert_called_with(
205 self.repo_id, self.workspace_id,
205 self.repo_id, self.workspace_id,
206 pull_request.target_ref_parts,
206 pull_request.target_ref_parts,
207 pull_request.source_repo.scm_instance(),
207 pull_request.source_repo.scm_instance(),
208 pull_request.source_ref_parts, dry_run=True,
208 pull_request.source_ref_parts, dry_run=True,
209 use_rebase=False, close_branch=False)
209 use_rebase=False, close_branch=False)
210
210
211 assert pull_request._last_merge_source_rev == self.source_commit
211 assert pull_request._last_merge_source_rev == self.source_commit
212 assert pull_request._last_merge_target_rev == self.target_commit
212 assert pull_request._last_merge_target_rev == self.target_commit
213 assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED
213 assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED
214
214
215 self.merge_mock.reset_mock()
215 self.merge_mock.reset_mock()
216 status, msg = PullRequestModel().merge_status(pull_request)
216 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
217 assert status is False
217 assert status is False
218 assert msg == 'This pull request cannot be merged because of merge conflicts. '
218 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
219 assert self.merge_mock.called is False
219 assert self.merge_mock.called is False
220
220
221 def test_merge_status_unknown_failure(self, pull_request):
221 def test_merge_status_unknown_failure(self, pull_request):
222 self.merge_mock.return_value = MergeResponse(
222 self.merge_mock.return_value = MergeResponse(
223 False, False, None, MergeFailureReason.UNKNOWN,
223 False, False, None, MergeFailureReason.UNKNOWN,
224 metadata={'exception': 'MockError'})
224 metadata={'exception': 'MockError'})
225
225
226 assert pull_request._last_merge_source_rev is None
226 assert pull_request._last_merge_source_rev is None
227 assert pull_request._last_merge_target_rev is None
227 assert pull_request._last_merge_target_rev is None
228 assert pull_request.last_merge_status is None
228 assert pull_request.last_merge_status is None
229
229
230 status, msg = PullRequestModel().merge_status(pull_request)
230 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
231 assert status is False
231 assert status is False
232 assert msg == (
232 assert msg == (
233 'This pull request cannot be merged because of an unhandled exception. '
233 'This pull request cannot be merged because of an unhandled exception. '
234 'MockError')
234 'MockError')
235 self.merge_mock.assert_called_with(
235 self.merge_mock.assert_called_with(
236 self.repo_id, self.workspace_id,
236 self.repo_id, self.workspace_id,
237 pull_request.target_ref_parts,
237 pull_request.target_ref_parts,
238 pull_request.source_repo.scm_instance(),
238 pull_request.source_repo.scm_instance(),
239 pull_request.source_ref_parts, dry_run=True,
239 pull_request.source_ref_parts, dry_run=True,
240 use_rebase=False, close_branch=False)
240 use_rebase=False, close_branch=False)
241
241
242 assert pull_request._last_merge_source_rev is None
242 assert pull_request._last_merge_source_rev is None
243 assert pull_request._last_merge_target_rev is None
243 assert pull_request._last_merge_target_rev is None
244 assert pull_request.last_merge_status is None
244 assert pull_request.last_merge_status is None
245
245
246 self.merge_mock.reset_mock()
246 self.merge_mock.reset_mock()
247 status, msg = PullRequestModel().merge_status(pull_request)
247 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
248 assert status is False
248 assert status is False
249 assert msg == (
249 assert msg == (
250 'This pull request cannot be merged because of an unhandled exception. '
250 'This pull request cannot be merged because of an unhandled exception. '
251 'MockError')
251 'MockError')
252 assert self.merge_mock.called is True
252 assert self.merge_mock.called is True
253
253
254 def test_merge_status_when_target_is_locked(self, pull_request):
254 def test_merge_status_when_target_is_locked(self, pull_request):
255 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
255 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
256 status, msg = PullRequestModel().merge_status(pull_request)
256 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
257 assert status is False
257 assert status is False
258 assert msg == (
258 assert msg == (
259 'This pull request cannot be merged because the target repository '
259 'This pull request cannot be merged because the target repository '
260 'is locked by user:1.')
260 'is locked by user:1.')
261
261
262 def test_merge_status_requirements_check_target(self, pull_request):
262 def test_merge_status_requirements_check_target(self, pull_request):
263
263
264 def has_largefiles(self, repo):
264 def has_largefiles(self, repo):
265 return repo == pull_request.source_repo
265 return repo == pull_request.source_repo
266
266
267 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
267 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
268 with patcher:
268 with patcher:
269 status, msg = PullRequestModel().merge_status(pull_request)
269 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
270
270
271 assert status is False
271 assert status is False
272 assert msg == 'Target repository large files support is disabled.'
272 assert msg == 'Target repository large files support is disabled.'
273
273
274 def test_merge_status_requirements_check_source(self, pull_request):
274 def test_merge_status_requirements_check_source(self, pull_request):
275
275
276 def has_largefiles(self, repo):
276 def has_largefiles(self, repo):
277 return repo == pull_request.target_repo
277 return repo == pull_request.target_repo
278
278
279 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
279 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
280 with patcher:
280 with patcher:
281 status, msg = PullRequestModel().merge_status(pull_request)
281 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
282
282
283 assert status is False
283 assert status is False
284 assert msg == 'Source repository large files support is disabled.'
284 assert msg == 'Source repository large files support is disabled.'
285
285
286 def test_merge(self, pull_request, merge_extras):
286 def test_merge(self, pull_request, merge_extras):
287 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
287 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
288 merge_ref = Reference(
288 merge_ref = Reference(
289 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
289 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
290 self.merge_mock.return_value = MergeResponse(
290 self.merge_mock.return_value = MergeResponse(
291 True, True, merge_ref, MergeFailureReason.NONE)
291 True, True, merge_ref, MergeFailureReason.NONE)
292
292
293 merge_extras['repository'] = pull_request.target_repo.repo_name
293 merge_extras['repository'] = pull_request.target_repo.repo_name
294 PullRequestModel().merge_repo(
294 PullRequestModel().merge_repo(
295 pull_request, pull_request.author, extras=merge_extras)
295 pull_request, pull_request.author, extras=merge_extras)
296 Session().commit()
296 Session().commit()
297
297
298 message = (
298 message = (
299 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
299 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
300 u'\n\n {pr_title}'.format(
300 u'\n\n {pr_title}'.format(
301 pr_id=pull_request.pull_request_id,
301 pr_id=pull_request.pull_request_id,
302 source_repo=safe_unicode(
302 source_repo=safe_unicode(
303 pull_request.source_repo.scm_instance().name),
303 pull_request.source_repo.scm_instance().name),
304 source_ref_name=pull_request.source_ref_parts.name,
304 source_ref_name=pull_request.source_ref_parts.name,
305 pr_title=safe_unicode(pull_request.title)
305 pr_title=safe_unicode(pull_request.title)
306 )
306 )
307 )
307 )
308 self.merge_mock.assert_called_with(
308 self.merge_mock.assert_called_with(
309 self.repo_id, self.workspace_id,
309 self.repo_id, self.workspace_id,
310 pull_request.target_ref_parts,
310 pull_request.target_ref_parts,
311 pull_request.source_repo.scm_instance(),
311 pull_request.source_repo.scm_instance(),
312 pull_request.source_ref_parts,
312 pull_request.source_ref_parts,
313 user_name=user.short_contact, user_email=user.email, message=message,
313 user_name=user.short_contact, user_email=user.email, message=message,
314 use_rebase=False, close_branch=False
314 use_rebase=False, close_branch=False
315 )
315 )
316 self.invalidation_mock.assert_called_once_with(
316 self.invalidation_mock.assert_called_once_with(
317 pull_request.target_repo.repo_name)
317 pull_request.target_repo.repo_name)
318
318
319 self.hook_mock.assert_called_with(
319 self.hook_mock.assert_called_with(
320 self.pull_request, self.pull_request.author, 'merge')
320 self.pull_request, self.pull_request.author, 'merge')
321
321
322 pull_request = PullRequest.get(pull_request.pull_request_id)
322 pull_request = PullRequest.get(pull_request.pull_request_id)
323 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
323 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
324
324
325 def test_merge_with_status_lock(self, pull_request, merge_extras):
325 def test_merge_with_status_lock(self, pull_request, merge_extras):
326 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
326 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
327 merge_ref = Reference(
327 merge_ref = Reference(
328 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
328 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
329 self.merge_mock.return_value = MergeResponse(
329 self.merge_mock.return_value = MergeResponse(
330 True, True, merge_ref, MergeFailureReason.NONE)
330 True, True, merge_ref, MergeFailureReason.NONE)
331
331
332 merge_extras['repository'] = pull_request.target_repo.repo_name
332 merge_extras['repository'] = pull_request.target_repo.repo_name
333
333
334 with pull_request.set_state(PullRequest.STATE_UPDATING):
334 with pull_request.set_state(PullRequest.STATE_UPDATING):
335 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
335 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
336 PullRequestModel().merge_repo(
336 PullRequestModel().merge_repo(
337 pull_request, pull_request.author, extras=merge_extras)
337 pull_request, pull_request.author, extras=merge_extras)
338 Session().commit()
338 Session().commit()
339
339
340 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
340 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
341
341
342 message = (
342 message = (
343 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
343 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
344 u'\n\n {pr_title}'.format(
344 u'\n\n {pr_title}'.format(
345 pr_id=pull_request.pull_request_id,
345 pr_id=pull_request.pull_request_id,
346 source_repo=safe_unicode(
346 source_repo=safe_unicode(
347 pull_request.source_repo.scm_instance().name),
347 pull_request.source_repo.scm_instance().name),
348 source_ref_name=pull_request.source_ref_parts.name,
348 source_ref_name=pull_request.source_ref_parts.name,
349 pr_title=safe_unicode(pull_request.title)
349 pr_title=safe_unicode(pull_request.title)
350 )
350 )
351 )
351 )
352 self.merge_mock.assert_called_with(
352 self.merge_mock.assert_called_with(
353 self.repo_id, self.workspace_id,
353 self.repo_id, self.workspace_id,
354 pull_request.target_ref_parts,
354 pull_request.target_ref_parts,
355 pull_request.source_repo.scm_instance(),
355 pull_request.source_repo.scm_instance(),
356 pull_request.source_ref_parts,
356 pull_request.source_ref_parts,
357 user_name=user.short_contact, user_email=user.email, message=message,
357 user_name=user.short_contact, user_email=user.email, message=message,
358 use_rebase=False, close_branch=False
358 use_rebase=False, close_branch=False
359 )
359 )
360 self.invalidation_mock.assert_called_once_with(
360 self.invalidation_mock.assert_called_once_with(
361 pull_request.target_repo.repo_name)
361 pull_request.target_repo.repo_name)
362
362
363 self.hook_mock.assert_called_with(
363 self.hook_mock.assert_called_with(
364 self.pull_request, self.pull_request.author, 'merge')
364 self.pull_request, self.pull_request.author, 'merge')
365
365
366 pull_request = PullRequest.get(pull_request.pull_request_id)
366 pull_request = PullRequest.get(pull_request.pull_request_id)
367 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
367 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
368
368
369 def test_merge_failed(self, pull_request, merge_extras):
369 def test_merge_failed(self, pull_request, merge_extras):
370 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
370 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
371 merge_ref = Reference(
371 merge_ref = Reference(
372 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
372 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
373 self.merge_mock.return_value = MergeResponse(
373 self.merge_mock.return_value = MergeResponse(
374 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
374 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
375
375
376 merge_extras['repository'] = pull_request.target_repo.repo_name
376 merge_extras['repository'] = pull_request.target_repo.repo_name
377 PullRequestModel().merge_repo(
377 PullRequestModel().merge_repo(
378 pull_request, pull_request.author, extras=merge_extras)
378 pull_request, pull_request.author, extras=merge_extras)
379 Session().commit()
379 Session().commit()
380
380
381 message = (
381 message = (
382 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
382 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
383 u'\n\n {pr_title}'.format(
383 u'\n\n {pr_title}'.format(
384 pr_id=pull_request.pull_request_id,
384 pr_id=pull_request.pull_request_id,
385 source_repo=safe_unicode(
385 source_repo=safe_unicode(
386 pull_request.source_repo.scm_instance().name),
386 pull_request.source_repo.scm_instance().name),
387 source_ref_name=pull_request.source_ref_parts.name,
387 source_ref_name=pull_request.source_ref_parts.name,
388 pr_title=safe_unicode(pull_request.title)
388 pr_title=safe_unicode(pull_request.title)
389 )
389 )
390 )
390 )
391 self.merge_mock.assert_called_with(
391 self.merge_mock.assert_called_with(
392 self.repo_id, self.workspace_id,
392 self.repo_id, self.workspace_id,
393 pull_request.target_ref_parts,
393 pull_request.target_ref_parts,
394 pull_request.source_repo.scm_instance(),
394 pull_request.source_repo.scm_instance(),
395 pull_request.source_ref_parts,
395 pull_request.source_ref_parts,
396 user_name=user.short_contact, user_email=user.email, message=message,
396 user_name=user.short_contact, user_email=user.email, message=message,
397 use_rebase=False, close_branch=False
397 use_rebase=False, close_branch=False
398 )
398 )
399
399
400 pull_request = PullRequest.get(pull_request.pull_request_id)
400 pull_request = PullRequest.get(pull_request.pull_request_id)
401 assert self.invalidation_mock.called is False
401 assert self.invalidation_mock.called is False
402 assert pull_request.merge_rev is None
402 assert pull_request.merge_rev is None
403
403
404 def test_get_commit_ids(self, pull_request):
404 def test_get_commit_ids(self, pull_request):
405 # The PR has been not merged yet, so expect an exception
405 # The PR has been not merged yet, so expect an exception
406 with pytest.raises(ValueError):
406 with pytest.raises(ValueError):
407 PullRequestModel()._get_commit_ids(pull_request)
407 PullRequestModel()._get_commit_ids(pull_request)
408
408
409 # Merge revision is in the revisions list
409 # Merge revision is in the revisions list
410 pull_request.merge_rev = pull_request.revisions[0]
410 pull_request.merge_rev = pull_request.revisions[0]
411 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
411 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
412 assert commit_ids == pull_request.revisions
412 assert commit_ids == pull_request.revisions
413
413
414 # Merge revision is not in the revisions list
414 # Merge revision is not in the revisions list
415 pull_request.merge_rev = 'f000' * 10
415 pull_request.merge_rev = 'f000' * 10
416 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
416 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
417 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
417 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
418
418
419 def test_get_diff_from_pr_version(self, pull_request):
419 def test_get_diff_from_pr_version(self, pull_request):
420 source_repo = pull_request.source_repo
420 source_repo = pull_request.source_repo
421 source_ref_id = pull_request.source_ref_parts.commit_id
421 source_ref_id = pull_request.source_ref_parts.commit_id
422 target_ref_id = pull_request.target_ref_parts.commit_id
422 target_ref_id = pull_request.target_ref_parts.commit_id
423 diff = PullRequestModel()._get_diff_from_pr_or_version(
423 diff = PullRequestModel()._get_diff_from_pr_or_version(
424 source_repo, source_ref_id, target_ref_id,
424 source_repo, source_ref_id, target_ref_id,
425 hide_whitespace_changes=False, diff_context=6)
425 hide_whitespace_changes=False, diff_context=6)
426 assert 'file_1' in diff.raw
426 assert 'file_1' in diff.raw
427
427
428 def test_generate_title_returns_unicode(self):
428 def test_generate_title_returns_unicode(self):
429 title = PullRequestModel().generate_pullrequest_title(
429 title = PullRequestModel().generate_pullrequest_title(
430 source='source-dummy',
430 source='source-dummy',
431 source_ref='source-ref-dummy',
431 source_ref='source-ref-dummy',
432 target='target-dummy',
432 target='target-dummy',
433 )
433 )
434 assert type(title) == unicode
434 assert type(title) == unicode
435
435
436 @pytest.mark.parametrize('title, has_wip', [
436 @pytest.mark.parametrize('title, has_wip', [
437 ('hello', False),
437 ('hello', False),
438 ('hello wip', False),
438 ('hello wip', False),
439 ('hello wip: xxx', False),
439 ('hello wip: xxx', False),
440 ('[wip] hello', True),
440 ('[wip] hello', True),
441 ('[wip] hello', True),
441 ('[wip] hello', True),
442 ('wip: hello', True),
442 ('wip: hello', True),
443 ('wip hello', True),
443 ('wip hello', True),
444
444
445 ])
445 ])
446 def test_wip_title_marker(self, pull_request, title, has_wip):
446 def test_wip_title_marker(self, pull_request, title, has_wip):
447 pull_request.title = title
447 pull_request.title = title
448 assert pull_request.work_in_progress == has_wip
448 assert pull_request.work_in_progress == has_wip
449
449
450
450
@pytest.mark.usefixtures('config_stub')
class TestIntegrationMerge(object):
    """Integration tests: merging a PR must behave like a push with respect to
    hooks, rcextensions and repository locking."""

    @pytest.mark.parametrize('extra_config', (
        {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
    ))
    def test_merge_triggers_push_hooks(
            self, pr_util, user_admin, capture_rcextensions, merge_extras,
            extra_config):
        """A successful merge fires both pre- and post-push rcextension hooks."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
            merge_state = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert merge_state.executed
        assert '_pre_push_hook' in capture_rcextensions
        assert '_push_hook' in capture_rcextensions

    def test_merge_can_be_rejected_by_pre_push_hook(
            self, pr_util, user_admin, capture_rcextensions, merge_extras):
        """A pre-push hook raising RepositoryError aborts the merge and no
        push hooks are recorded."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
            pre_pull.side_effect = RepositoryError("Disallow push!")
            merge_status = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert not merge_status.executed
        assert 'pre_push' not in capture_rcextensions
        assert 'post_push' not in capture_rcextensions

    def test_merge_fails_if_target_is_locked(
            self, pr_util, user_regular, merge_extras):
        """Merging into a repo locked by another user must not execute."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # Lock owned by a *different* user than the one merging.
        locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
        pull_request.target_repo.locked = locked_by
        # TODO: johbo: Check if this can work based on the database, currently
        # all data is pre-computed, that's why just updating the DB is not
        # enough.
        merge_extras['locked_by'] = locked_by
        merge_extras['repository'] = pull_request.target_repo.repo_name
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        Session().commit()
        merge_status = PullRequestModel().merge_repo(
            pull_request, user_regular, extras=merge_extras)
        Session().commit()

        assert not merge_status.executed
512
512
@pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
    (False, 1, 0),
    (True, 0, 1),
])
def test_outdated_comments(
        pr_util, use_outdated, inlines_count, outdated_count, config_stub):
    """An inline comment outside the updated diff becomes outdated only when
    the outdated-comments feature is enabled."""
    pull_request = pr_util.create_pull_request()
    pr_util.create_inline_comment(file_path='not_in_updated_diff')

    with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
        pr_util.add_one_commit()
        assert_inline_comments(
            pull_request, visible=inlines_count, outdated=outdated_count)
    outdated_comment_mock.assert_called_with(pull_request)
527
527
528
528
@pytest.mark.parametrize('mr_type, expected_msg', [
    (MergeFailureReason.NONE,
     'This pull request can be automatically merged.'),
    (MergeFailureReason.UNKNOWN,
     'This pull request cannot be merged because of an unhandled exception. CRASH'),
    (MergeFailureReason.MERGE_FAILED,
     'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
    (MergeFailureReason.PUSH_FAILED,
     'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
    (MergeFailureReason.TARGET_IS_NOT_HEAD,
     'This pull request cannot be merged because the target `ref_name` is not a head.'),
    (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
     'This pull request cannot be merged because the source contains more branches than the target.'),
    (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
     'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
    (MergeFailureReason.TARGET_IS_LOCKED,
     'This pull request cannot be merged because the target repository is locked by user:123.'),
    (MergeFailureReason.MISSING_TARGET_REF,
     'This pull request cannot be merged because the target reference `ref_name` is missing.'),
    (MergeFailureReason.MISSING_SOURCE_REF,
     'This pull request cannot be merged because the source reference `ref_name` is missing.'),
    (MergeFailureReason.SUBREPO_MERGE_FAILED,
     'This pull request cannot be merged because of conflicts related to sub repositories.'),
])
def test_merge_response_message(mr_type, expected_msg):
    """Each MergeFailureReason maps to its human-readable status message,
    interpolated from the response metadata."""
    merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
    # Metadata keys consumed by the message templates above.
    metadata = {
        'unresolved_files': 'CONFLICT_FILE',
        'exception': "CRASH",
        'target': 'some-repo',
        'merge_commit': 'merge_commit',
        'target_ref': merge_ref,
        'source_ref': merge_ref,
        'heads': ','.join(['a', 'b', 'c']),
        'locked_by': 'user:123'
    }

    merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
    assert merge_response.merge_status_message == expected_msg
569
569
570
570
@pytest.fixture()
def merge_extras(user_regular):
    """
    Context for the vcs operation when running a merge.

    Returns the ``extras`` dict the vcs layer expects for a push-like
    operation, pre-filled with dummy values; tests overwrite ``repository``
    (and sometimes ``locked_by``) before use.
    """
    extras = {
        'ip': '127.0.0.1',
        'username': user_regular.username,
        'user_id': user_regular.user_id,
        'action': 'push',
        'repository': 'fake_target_repo_name',
        'scm': 'git',
        'config': 'fake_config_ini_path',
        'repo_store': '',
        'make_lock': None,
        'locked_by': [None, None, None],
        'server_url': 'http://test.example.com:5000',
        'hooks': ['push', 'pull'],
        'is_shadow_repo': False,
    }
    return extras
592
592
593
593
@pytest.mark.usefixtures('config_stub')
class TestUpdateCommentHandling(object):
    """Behaviour of inline comments when a PR is updated with new commits:
    comments stay visible while their context is unchanged and get flagged as
    outdated when the commented lines change."""

    @pytest.fixture(autouse=True, scope='class')
    def enable_outdated_comments(self, request, baseapp):
        # Force the outdated-comments feature on for every test in the class.
        config_patch = mock.patch.dict(
            'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
        config_patch.start()

        @request.addfinalizer
        def cleanup():
            config_patch.stop()

    def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
        """Adding a commit that does not touch the commented file keeps the
        comment visible."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    def test_comment_stays_unflagged_on_change_above(self, pr_util):
        """A change above the commented line keeps the comment visible and
        shifts its line number down by one."""
        original_content = ''.join(
            ['line {}\n'.format(x) for x in range(1, 11)])
        updated_content = 'new_line_at_top\n' + original_content
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])

        with outdated_comments_patcher():
            comment = pr_util.create_inline_comment(
                line_no=u'n8', file_path='file_b')
            pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)
        assert comment.line_no == u'n9'

    def test_comment_stays_unflagged_on_change_below(self, pr_util):
        """A change below the commented line keeps the comment visible."""
        original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
        updated_content = original_content + 'new_line_at_end\n'
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
    def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
        """A change within the comment's diff context flags it outdated."""
        base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
        change_lines = list(base_lines)
        change_lines.insert(6, 'line 6a added\n')

        # Changes on the last line of sight
        update_lines = list(change_lines)
        update_lines[0] = 'line 1 changed\n'
        update_lines[-1] = 'line 12 changed\n'

        def file_b(lines):
            return FileNode('file_b', ''.join(lines))

        commits = [
            {'message': 'a', 'added': [file_b(base_lines)]},
            {'message': 'b', 'changed': [file_b(change_lines)]},
            {'message': 'c', 'changed': [file_b(update_lines)]},
        ]

        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(line_no=line_no, file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
        assert_inline_comments(pull_request, visible=0, outdated=1)

    @pytest.mark.parametrize("change, content", [
        ('changed', 'changed\n'),
        ('removed', ''),
    ], ids=['changed', 'removed'])
    def test_comment_flagged_on_change(self, pr_util, change, content):
        """Changing or removing the commented file flags the comment outdated."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', change: [FileNode('file_b', content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
        assert_inline_comments(pull_request, visible=0, outdated=1)
700
700
701
701
@pytest.mark.usefixtures('config_stub')
class TestUpdateChangedFiles(object):
    """Verify the added/modified/removed file summary after PR updates."""

    def test_no_changes_on_unchanged_diff(self, pr_util):
        # PR covers commit b (which adds file_b); the update only adds file_c.
        history = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode('file_c', 'test_content c\n')]},
        ]
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b',
            revisions=['b'], name_suffix='per-file-review')

        # Move the PR head forward to c, introducing the new file.
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pr, added=['file_c'], modified=[], removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        history = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode('file_b', 'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode('file_b', 'test_content b\n')]},
        ]
        # Open a PR from a to b that adds file_b.
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b',
            revisions=['b'], name_suffix='per-file-review')

        # The first update (head c) modifies file_b.
        pr_util.add_one_commit(head='c')
        assert_pr_file_changes(
            pr, added=[], modified=['file_b'], removed=[])

        # The second update (head d) restores the original content, so
        # against the initial PR version no change should be reported.
        pr_util.add_one_commit(head='d')
        assert_pr_file_changes(
            pr, added=[], modified=[], removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        history = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # Open a PR from a to b that adds all three files.
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b',
            revisions=['b'], name_suffix='per-file-review')

        # The update to c touches every file of the PR.
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pr, added=[], modified=['file_a', 'file_b', 'file_c'],
            removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        history = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # Open a PR from a to b that adds all three files.
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b',
            revisions=['b'], name_suffix='per-file-review')

        # The update to c deletes every file of the PR.
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pr, added=[], modified=[],
            removed=['file_a', 'file_b', 'file_c'])
810
810
811
811
def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    """Updating a PR with new source commits must record one version entry."""
    model = PullRequestModel()
    pr = pr_util.create_pull_request()
    # Advance the source repo so the update actually has something to pick up.
    pr_util.update_source_repository()

    model.update_commits(pr, pr.author)

    assert len(model.get_versions(pr)) == 1
821
821
822
822
def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    """An update without any source change must not create a version."""
    pr = pr_util.create_pull_request()
    model = PullRequestModel()

    model.update_commits(pr, pr.author)

    # Nothing changed, so the version list stays empty.
    assert len(model.get_versions(pr)) == 0
830
830
831
831
def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    """Existing comments get linked to the version created by an update."""
    model = PullRequestModel()
    pr = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()

    model.update_commits(pr, pr.author)

    latest_version = model.get_versions(pr)[0]
    assert comment.pull_request_version == latest_version
842
842
843
843
def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    """Updating a PR posts an automatic comment describing the change."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    # Two source updates; update_commits() below picks up the latest state.
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    update_response = model.update_commits(pull_request, pull_request.author)

    commit_id = update_response.common_ancestor_id
    # Expect to find a new comment about the change
    # NOTE(review): the RST body below is compared verbatim against the
    # auto-generated comment text, including its inner indentation.
    expected_message = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c-{}-92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    ).format(commit_id[:12])
    # The newest comment (by modification time) is the auto-update one.
    pull_request_comments = sorted(
        pull_request.comments, key=lambda c: c.modified_at)
    update_comment = pull_request_comments[-1]
    assert update_comment.text == expected_message
875
875
876
876
def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    """A version snapshot must copy every relevant attribute of the PR."""
    pull_request = pr_util.create_pull_request()

    # Use non-default values so the copied attributes are distinguishable
    # from freshly initialized ones.
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request._last_merge_source_rev = "0" * 40
    pull_request._last_merge_target_rev = "1" * 40
    pull_request.last_merge_status = 1
    pull_request.merge_rev = "2" * 40

    # Capture the auto-managed timestamps before snapshotting.
    created_on = pull_request.created_on
    updated_on = pull_request.updated_on

    snapshot = PullRequestModel()._create_version_from_snapshot(pull_request)

    params = pr_util.create_parameters
    assert snapshot.title == params['title']
    assert snapshot.description == params['description']
    assert snapshot.status == PullRequest.STATUS_CLOSED

    # A version gets its own creation time but keeps updated_on.
    assert snapshot.created_on != created_on
    assert snapshot.updated_on == updated_on

    assert snapshot.user_id == pull_request.user_id
    assert snapshot.revisions == params['revisions']
    assert snapshot.source_repo == pr_util.source_repository
    assert snapshot.source_ref == params['source_ref']
    assert snapshot.target_repo == pr_util.target_repository
    assert snapshot.target_ref == params['target_ref']
    assert snapshot._last_merge_source_rev == pull_request._last_merge_source_rev
    assert snapshot._last_merge_target_rev == pull_request._last_merge_target_rev
    assert snapshot.last_merge_status == pull_request.last_merge_status
    assert snapshot.merge_rev == pull_request.merge_rev
    assert snapshot.pull_request == pull_request
914
914
915
915
def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    """Linking must only touch comments not yet tied to a version."""
    version1 = pr_util.create_version_of_pull_request()
    comment_linked = pr_util.create_comment(linked_to=version1)
    comment_unlinked = pr_util.create_comment()
    version2 = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(version2)
    Session().commit()

    # The previously unlinked comment is now attached to version2 ...
    assert (comment_unlinked.pull_request_version_id ==
            version2.pull_request_version_id)
    # ... while the already linked one stays on version1.
    assert (comment_linked.pull_request_version_id ==
            version1.pull_request_version_id)
    assert (comment_unlinked.pull_request_version_id !=
            comment_linked.pull_request_version_id)
935
935
936
936
def test_calculate_commits():
    """Commit-set diffing splits ids into added/common/removed/total."""
    previous_ids = [1, 2, 3]
    current_ids = [1, 3, 4, 5]

    change = PullRequestModel()._calculate_commit_id_changes(
        previous_ids, current_ids)

    assert change.added == [4, 5]
    assert change.common == [1, 3]
    assert change.removed == [2]
    assert change.total == [1, 3, 4, 5]
945
945
946
946
def assert_inline_comments(pull_request, visible=None, outdated=None):
    """Check inline-comment counts on *pull_request*.

    ``visible`` and ``outdated`` are the expected counts; passing ``None``
    skips the respective check.
    """
    repo_id = pull_request.target_repo.repo_id
    if visible is not None:
        comments = CommentsModel().get_inline_comments(
            repo_id, pull_request=pull_request)
        visible_count = CommentsModel().get_inline_comments_count(comments)
        assert visible_count == visible
    if outdated is not None:
        outdated_count = len(CommentsModel().get_outdated_comments(
            repo_id, pull_request))
        assert outdated_count == outdated
958
958
959
959
def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
    """Assert the per-file change summary of *pull_request*.

    Changes are always computed against the first (original) PR version.
    """
    model = PullRequestModel()
    # always use first version, ie original PR to calculate changes
    first_version = model.get_versions(pull_request)[0]
    old_diff, new_diff = model._generate_update_diffs(
        pull_request, first_version)
    file_changes = model._calculate_file_changes(old_diff, new_diff)

    assert added == file_changes.added, \
        'expected added:%s vs value:%s' % (added, file_changes.added)
    assert modified == file_changes.modified, \
        'expected modified:%s vs value:%s' % (modified, file_changes.modified)
    assert removed == file_changes.removed, \
        'expected removed:%s vs value:%s' % (removed, file_changes.removed)
976
976
977
977
def outdated_comments_patcher(use_outdated=True):
    """Return a patcher forcing ``CommentsModel.use_outdated_comments``.

    The returned ``mock.patch`` object makes the patched attribute always
    report *use_outdated* while active.
    """
    patcher = mock.patch.object(
        CommentsModel, 'use_outdated_comments', return_value=use_outdated)
    return patcher
General Comments 0
You need to be logged in to leave comments. Login now