##// END OF EJS Templates
tests: fix cache problems after empty repo check change.
marcink -
r3738:b8214661 new-ui
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,137 +1,143 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import pytest
22 import pytest
23
23
24 from rhodecode.model.meta import Session
24 from rhodecode.model.meta import Session
25 from rhodecode.model.repo import RepoModel
25 from rhodecode.model.repo import RepoModel
26 from rhodecode.model.user import UserModel
26 from rhodecode.model.user import UserModel
27 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
27 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
28 from rhodecode.api.tests.utils import (
28 from rhodecode.api.tests.utils import (
29 build_data, api_call, assert_ok, assert_error, expected_permissions)
29 build_data, api_call, assert_ok, assert_error, expected_permissions)
30
30
31
31
32 @pytest.mark.usefixtures("testuser_api", "app")
32 @pytest.mark.usefixtures("testuser_api", "app")
33 class TestGetRepo(object):
33 class TestGetRepo(object):
34 @pytest.mark.parametrize("apikey_attr, expect_secrets", [
34 @pytest.mark.parametrize("apikey_attr, expect_secrets", [
35 ('apikey', True),
35 ('apikey', True),
36 ('apikey_regular', False),
36 ('apikey_regular', False),
37 ])
37 ])
38 @pytest.mark.parametrize("cache_param", [
38 @pytest.mark.parametrize("cache_param", [
39 True,
39 True,
40 False,
40 False,
41 None,
41 None,
42 ])
42 ])
43 def test_api_get_repo(
43 def test_api_get_repo(
44 self, apikey_attr, expect_secrets, cache_param, backend,
44 self, apikey_attr, expect_secrets, cache_param, backend,
45 user_util):
45 user_util):
46 repo = backend.create_repo()
46 repo = backend.create_repo()
47 repo_id = repo.repo_id
47 usr = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
48 usr = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
48 group = user_util.create_user_group(members=[usr])
49 group = user_util.create_user_group(members=[usr])
49 user_util.grant_user_group_permission_to_repo(
50 user_util.grant_user_group_permission_to_repo(
50 repo=repo, user_group=group, permission_name='repository.read')
51 repo=repo, user_group=group, permission_name='repository.read')
51 Session().commit()
52 Session().commit()
52 kwargs = {
53 kwargs = {
53 'repoid': repo.repo_name,
54 'repoid': repo.repo_name,
54 }
55 }
55 if cache_param is not None:
56 if cache_param is not None:
56 kwargs['cache'] = cache_param
57 kwargs['cache'] = cache_param
57
58
58 apikey = getattr(self, apikey_attr)
59 apikey = getattr(self, apikey_attr)
59 id_, params = build_data(apikey, 'get_repo', **kwargs)
60 id_, params = build_data(apikey, 'get_repo', **kwargs)
60 response = api_call(self.app, params)
61 response = api_call(self.app, params)
61
62
62 ret = repo.get_api_data()
63 ret = repo.get_api_data()
63
64
64 permissions = expected_permissions(repo)
65 permissions = expected_permissions(repo)
65
66
66 followers = []
67 followers = []
68
69 repo = RepoModel().get(repo_id)
67 for user in repo.followers:
70 for user in repo.followers:
68 followers.append(user.user.get_api_data(
71 followers.append(user.user.get_api_data(
69 include_secrets=expect_secrets))
72 include_secrets=expect_secrets))
70
73
71 ret['permissions'] = permissions
74 ret['permissions'] = permissions
72 ret['followers'] = followers
75 ret['followers'] = followers
73
76
74 expected = ret
77 expected = ret
75
78
76 assert_ok(id_, expected, given=response.body)
79 assert_ok(id_, expected, given=response.body)
77
80
78 @pytest.mark.parametrize("grant_perm", [
81 @pytest.mark.parametrize("grant_perm", [
79 'repository.admin',
82 'repository.admin',
80 'repository.write',
83 'repository.write',
81 'repository.read',
84 'repository.read',
82 ])
85 ])
83 def test_api_get_repo_by_non_admin(self, grant_perm, backend):
86 def test_api_get_repo_by_non_admin(self, grant_perm, backend):
84 # TODO: Depending on which tests are running before this one, we
87 # TODO: Depending on which tests are running before this one, we
85 # start with a different number of permissions in the database.
88 # start with a different number of permissions in the database.
86 repo = RepoModel().get_by_repo_name(backend.repo_name)
89 repo = RepoModel().get_by_repo_name(backend.repo_name)
90 repo_id = repo.repo_id
87 permission_count = len(repo.repo_to_perm)
91 permission_count = len(repo.repo_to_perm)
88
92
89 RepoModel().grant_user_permission(repo=backend.repo_name,
93 RepoModel().grant_user_permission(repo=backend.repo_name,
90 user=self.TEST_USER_LOGIN,
94 user=self.TEST_USER_LOGIN,
91 perm=grant_perm)
95 perm=grant_perm)
92 Session().commit()
96 Session().commit()
93 id_, params = build_data(
97 id_, params = build_data(
94 self.apikey_regular, 'get_repo', repoid=backend.repo_name)
98 self.apikey_regular, 'get_repo', repoid=backend.repo_name)
95 response = api_call(self.app, params)
99 response = api_call(self.app, params)
96
100
97 repo = RepoModel().get_by_repo_name(backend.repo_name)
101 repo = RepoModel().get_by_repo_name(backend.repo_name)
98 ret = repo.get_api_data()
102 ret = repo.get_api_data()
99
103
100 assert permission_count + 1, len(repo.repo_to_perm)
104 assert permission_count + 1, len(repo.repo_to_perm)
101
105
102 permissions = expected_permissions(repo)
106 permissions = expected_permissions(repo)
103
107
104 followers = []
108 followers = []
109
110 repo = RepoModel().get(repo_id)
105 for user in repo.followers:
111 for user in repo.followers:
106 followers.append(user.user.get_api_data())
112 followers.append(user.user.get_api_data())
107
113
108 ret['permissions'] = permissions
114 ret['permissions'] = permissions
109 ret['followers'] = followers
115 ret['followers'] = followers
110
116
111 expected = ret
117 expected = ret
112 try:
118 try:
113 assert_ok(id_, expected, given=response.body)
119 assert_ok(id_, expected, given=response.body)
114 finally:
120 finally:
115 RepoModel().revoke_user_permission(
121 RepoModel().revoke_user_permission(
116 backend.repo_name, self.TEST_USER_LOGIN)
122 backend.repo_name, self.TEST_USER_LOGIN)
117
123
118 def test_api_get_repo_by_non_admin_no_permission_to_repo(self, backend):
124 def test_api_get_repo_by_non_admin_no_permission_to_repo(self, backend):
119 RepoModel().grant_user_permission(repo=backend.repo_name,
125 RepoModel().grant_user_permission(repo=backend.repo_name,
120 user=self.TEST_USER_LOGIN,
126 user=self.TEST_USER_LOGIN,
121 perm='repository.none')
127 perm='repository.none')
122
128
123 id_, params = build_data(
129 id_, params = build_data(
124 self.apikey_regular, 'get_repo', repoid=backend.repo_name)
130 self.apikey_regular, 'get_repo', repoid=backend.repo_name)
125 response = api_call(self.app, params)
131 response = api_call(self.app, params)
126
132
127 expected = 'repository `%s` does not exist' % (backend.repo_name)
133 expected = 'repository `%s` does not exist' % (backend.repo_name)
128 assert_error(id_, expected, given=response.body)
134 assert_error(id_, expected, given=response.body)
129
135
130 def test_api_get_repo_not_existing(self):
136 def test_api_get_repo_not_existing(self):
131 id_, params = build_data(
137 id_, params = build_data(
132 self.apikey, 'get_repo', repoid='no-such-repo')
138 self.apikey, 'get_repo', repoid='no-such-repo')
133 response = api_call(self.app, params)
139 response = api_call(self.app, params)
134
140
135 ret = 'repository `%s` does not exist' % 'no-such-repo'
141 ret = 'repository `%s` does not exist' % 'no-such-repo'
136 expected = ret
142 expected = ret
137 assert_error(id_, expected, given=response.body)
143 assert_error(id_, expected, given=response.body)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,1739 +1,1745 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2019 RhodeCode GmbH
3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid import compat
33 from pyramid import compat
34 from pyramid.threadlocal import get_current_request
34 from pyramid.threadlocal import get_current_request
35
35
36 from rhodecode import events
36 from rhodecode import events
37 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 from rhodecode.lib import audit_logger
39 from rhodecode.lib import audit_logger
40 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.compat import OrderedDict
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 from rhodecode.lib.markup_renderer import (
42 from rhodecode.lib.markup_renderer import (
43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 from rhodecode.lib.vcs.backends.base import (
45 from rhodecode.lib.vcs.backends.base import (
46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 from rhodecode.lib.vcs.exceptions import (
48 from rhodecode.lib.vcs.exceptions import (
49 CommitDoesNotExistError, EmptyRepositoryError)
49 CommitDoesNotExistError, EmptyRepositoryError)
50 from rhodecode.model import BaseModel
50 from rhodecode.model import BaseModel
51 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.comment import CommentsModel
53 from rhodecode.model.db import (
53 from rhodecode.model.db import (
54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 from rhodecode.model.meta import Session
56 from rhodecode.model.meta import Session
57 from rhodecode.model.notification import NotificationModel, \
57 from rhodecode.model.notification import NotificationModel, \
58 EmailNotificationModel
58 EmailNotificationModel
59 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.scm import ScmModel
60 from rhodecode.model.settings import VcsSettingsModel
60 from rhodecode.model.settings import VcsSettingsModel
61
61
62
62
63 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
64
64
65
65
66 # Data structure to hold the response data when updating commits during a pull
66 # Data structure to hold the response data when updating commits during a pull
67 # request update.
67 # request update.
68 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 'executed', 'reason', 'new', 'old', 'changes',
69 'executed', 'reason', 'new', 'old', 'changes',
70 'source_changed', 'target_changed'])
70 'source_changed', 'target_changed'])
71
71
72
72
73 class PullRequestModel(BaseModel):
73 class PullRequestModel(BaseModel):
74
74
75 cls = PullRequest
75 cls = PullRequest
76
76
77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78
78
79 UPDATE_STATUS_MESSAGES = {
79 UPDATE_STATUS_MESSAGES = {
80 UpdateFailureReason.NONE: lazy_ugettext(
80 UpdateFailureReason.NONE: lazy_ugettext(
81 'Pull request update successful.'),
81 'Pull request update successful.'),
82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 'Pull request update failed because of an unknown error.'),
83 'Pull request update failed because of an unknown error.'),
84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 'No update needed because the source and target have not changed.'),
85 'No update needed because the source and target have not changed.'),
86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 'Pull request cannot be updated because the reference type is '
87 'Pull request cannot be updated because the reference type is '
88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 'This pull request cannot be updated because the target '
90 'This pull request cannot be updated because the target '
91 'reference is missing.'),
91 'reference is missing.'),
92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 'This pull request cannot be updated because the source '
93 'This pull request cannot be updated because the source '
94 'reference is missing.'),
94 'reference is missing.'),
95 }
95 }
96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98
98
99 def __get_pull_request(self, pull_request):
99 def __get_pull_request(self, pull_request):
100 return self._get_instance((
100 return self._get_instance((
101 PullRequest, PullRequestVersion), pull_request)
101 PullRequest, PullRequestVersion), pull_request)
102
102
103 def _check_perms(self, perms, pull_request, user, api=False):
103 def _check_perms(self, perms, pull_request, user, api=False):
104 if not api:
104 if not api:
105 return h.HasRepoPermissionAny(*perms)(
105 return h.HasRepoPermissionAny(*perms)(
106 user=user, repo_name=pull_request.target_repo.repo_name)
106 user=user, repo_name=pull_request.target_repo.repo_name)
107 else:
107 else:
108 return h.HasRepoPermissionAnyApi(*perms)(
108 return h.HasRepoPermissionAnyApi(*perms)(
109 user=user, repo_name=pull_request.target_repo.repo_name)
109 user=user, repo_name=pull_request.target_repo.repo_name)
110
110
111 def check_user_read(self, pull_request, user, api=False):
111 def check_user_read(self, pull_request, user, api=False):
112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 return self._check_perms(_perms, pull_request, user, api)
113 return self._check_perms(_perms, pull_request, user, api)
114
114
115 def check_user_merge(self, pull_request, user, api=False):
115 def check_user_merge(self, pull_request, user, api=False):
116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 return self._check_perms(_perms, pull_request, user, api)
117 return self._check_perms(_perms, pull_request, user, api)
118
118
119 def check_user_update(self, pull_request, user, api=False):
119 def check_user_update(self, pull_request, user, api=False):
120 owner = user.user_id == pull_request.user_id
120 owner = user.user_id == pull_request.user_id
121 return self.check_user_merge(pull_request, user, api) or owner
121 return self.check_user_merge(pull_request, user, api) or owner
122
122
123 def check_user_delete(self, pull_request, user):
123 def check_user_delete(self, pull_request, user):
124 owner = user.user_id == pull_request.user_id
124 owner = user.user_id == pull_request.user_id
125 _perms = ('repository.admin',)
125 _perms = ('repository.admin',)
126 return self._check_perms(_perms, pull_request, user) or owner
126 return self._check_perms(_perms, pull_request, user) or owner
127
127
128 def check_user_change_status(self, pull_request, user, api=False):
128 def check_user_change_status(self, pull_request, user, api=False):
129 reviewer = user.user_id in [x.user_id for x in
129 reviewer = user.user_id in [x.user_id for x in
130 pull_request.reviewers]
130 pull_request.reviewers]
131 return self.check_user_update(pull_request, user, api) or reviewer
131 return self.check_user_update(pull_request, user, api) or reviewer
132
132
133 def check_user_comment(self, pull_request, user):
133 def check_user_comment(self, pull_request, user):
134 owner = user.user_id == pull_request.user_id
134 owner = user.user_id == pull_request.user_id
135 return self.check_user_read(pull_request, user) or owner
135 return self.check_user_read(pull_request, user) or owner
136
136
137 def get(self, pull_request):
137 def get(self, pull_request):
138 return self.__get_pull_request(pull_request)
138 return self.__get_pull_request(pull_request)
139
139
140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
141 opened_by=None, order_by=None,
141 opened_by=None, order_by=None,
142 order_dir='desc', only_created=True):
142 order_dir='desc', only_created=True):
143 repo = None
143 repo = None
144 if repo_name:
144 if repo_name:
145 repo = self._get_repo(repo_name)
145 repo = self._get_repo(repo_name)
146
146
147 q = PullRequest.query()
147 q = PullRequest.query()
148
148
149 # source or target
149 # source or target
150 if repo and source:
150 if repo and source:
151 q = q.filter(PullRequest.source_repo == repo)
151 q = q.filter(PullRequest.source_repo == repo)
152 elif repo:
152 elif repo:
153 q = q.filter(PullRequest.target_repo == repo)
153 q = q.filter(PullRequest.target_repo == repo)
154
154
155 # closed,opened
155 # closed,opened
156 if statuses:
156 if statuses:
157 q = q.filter(PullRequest.status.in_(statuses))
157 q = q.filter(PullRequest.status.in_(statuses))
158
158
159 # opened by filter
159 # opened by filter
160 if opened_by:
160 if opened_by:
161 q = q.filter(PullRequest.user_id.in_(opened_by))
161 q = q.filter(PullRequest.user_id.in_(opened_by))
162
162
163 # only get those that are in "created" state
163 # only get those that are in "created" state
164 if only_created:
164 if only_created:
165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
166
166
167 if order_by:
167 if order_by:
168 order_map = {
168 order_map = {
169 'name_raw': PullRequest.pull_request_id,
169 'name_raw': PullRequest.pull_request_id,
170 'id': PullRequest.pull_request_id,
170 'id': PullRequest.pull_request_id,
171 'title': PullRequest.title,
171 'title': PullRequest.title,
172 'updated_on_raw': PullRequest.updated_on,
172 'updated_on_raw': PullRequest.updated_on,
173 'target_repo': PullRequest.target_repo_id
173 'target_repo': PullRequest.target_repo_id
174 }
174 }
175 if order_dir == 'asc':
175 if order_dir == 'asc':
176 q = q.order_by(order_map[order_by].asc())
176 q = q.order_by(order_map[order_by].asc())
177 else:
177 else:
178 q = q.order_by(order_map[order_by].desc())
178 q = q.order_by(order_map[order_by].desc())
179
179
180 return q
180 return q
181
181
182 def count_all(self, repo_name, source=False, statuses=None,
182 def count_all(self, repo_name, source=False, statuses=None,
183 opened_by=None):
183 opened_by=None):
184 """
184 """
185 Count the number of pull requests for a specific repository.
185 Count the number of pull requests for a specific repository.
186
186
187 :param repo_name: target or source repo
187 :param repo_name: target or source repo
188 :param source: boolean flag to specify if repo_name refers to source
188 :param source: boolean flag to specify if repo_name refers to source
189 :param statuses: list of pull request statuses
189 :param statuses: list of pull request statuses
190 :param opened_by: author user of the pull request
190 :param opened_by: author user of the pull request
191 :returns: int number of pull requests
191 :returns: int number of pull requests
192 """
192 """
193 q = self._prepare_get_all_query(
193 q = self._prepare_get_all_query(
194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195
195
196 return q.count()
196 return q.count()
197
197
198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 offset=0, length=None, order_by=None, order_dir='desc'):
199 offset=0, length=None, order_by=None, order_dir='desc'):
200 """
200 """
201 Get all pull requests for a specific repository.
201 Get all pull requests for a specific repository.
202
202
203 :param repo_name: target or source repo
203 :param repo_name: target or source repo
204 :param source: boolean flag to specify if repo_name refers to source
204 :param source: boolean flag to specify if repo_name refers to source
205 :param statuses: list of pull request statuses
205 :param statuses: list of pull request statuses
206 :param opened_by: author user of the pull request
206 :param opened_by: author user of the pull request
207 :param offset: pagination offset
207 :param offset: pagination offset
208 :param length: length of returned list
208 :param length: length of returned list
209 :param order_by: order of the returned list
209 :param order_by: order of the returned list
210 :param order_dir: 'asc' or 'desc' ordering direction
210 :param order_dir: 'asc' or 'desc' ordering direction
211 :returns: list of pull requests
211 :returns: list of pull requests
212 """
212 """
213 q = self._prepare_get_all_query(
213 q = self._prepare_get_all_query(
214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 order_by=order_by, order_dir=order_dir)
215 order_by=order_by, order_dir=order_dir)
216
216
217 if length:
217 if length:
218 pull_requests = q.limit(length).offset(offset).all()
218 pull_requests = q.limit(length).offset(offset).all()
219 else:
219 else:
220 pull_requests = q.all()
220 pull_requests = q.all()
221
221
222 return pull_requests
222 return pull_requests
223
223
224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 opened_by=None):
225 opened_by=None):
226 """
226 """
227 Count the number of pull requests for a specific repository that are
227 Count the number of pull requests for a specific repository that are
228 awaiting review.
228 awaiting review.
229
229
230 :param repo_name: target or source repo
230 :param repo_name: target or source repo
231 :param source: boolean flag to specify if repo_name refers to source
231 :param source: boolean flag to specify if repo_name refers to source
232 :param statuses: list of pull request statuses
232 :param statuses: list of pull request statuses
233 :param opened_by: author user of the pull request
233 :param opened_by: author user of the pull request
234 :returns: int number of pull requests
234 :returns: int number of pull requests
235 """
235 """
236 pull_requests = self.get_awaiting_review(
236 pull_requests = self.get_awaiting_review(
237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238
238
239 return len(pull_requests)
239 return len(pull_requests)
240
240
241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 opened_by=None, offset=0, length=None,
242 opened_by=None, offset=0, length=None,
243 order_by=None, order_dir='desc'):
243 order_by=None, order_dir='desc'):
244 """
244 """
245 Get all pull requests for a specific repository that are awaiting
245 Get all pull requests for a specific repository that are awaiting
246 review.
246 review.
247
247
248 :param repo_name: target or source repo
248 :param repo_name: target or source repo
249 :param source: boolean flag to specify if repo_name refers to source
249 :param source: boolean flag to specify if repo_name refers to source
250 :param statuses: list of pull request statuses
250 :param statuses: list of pull request statuses
251 :param opened_by: author user of the pull request
251 :param opened_by: author user of the pull request
252 :param offset: pagination offset
252 :param offset: pagination offset
253 :param length: length of returned list
253 :param length: length of returned list
254 :param order_by: order of the returned list
254 :param order_by: order of the returned list
255 :param order_dir: 'asc' or 'desc' ordering direction
255 :param order_dir: 'asc' or 'desc' ordering direction
256 :returns: list of pull requests
256 :returns: list of pull requests
257 """
257 """
258 pull_requests = self.get_all(
258 pull_requests = self.get_all(
259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 order_by=order_by, order_dir=order_dir)
260 order_by=order_by, order_dir=order_dir)
261
261
262 _filtered_pull_requests = []
262 _filtered_pull_requests = []
263 for pr in pull_requests:
263 for pr in pull_requests:
264 status = pr.calculated_review_status()
264 status = pr.calculated_review_status()
265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 ChangesetStatus.STATUS_UNDER_REVIEW]:
266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 _filtered_pull_requests.append(pr)
267 _filtered_pull_requests.append(pr)
268 if length:
268 if length:
269 return _filtered_pull_requests[offset:offset+length]
269 return _filtered_pull_requests[offset:offset+length]
270 else:
270 else:
271 return _filtered_pull_requests
271 return _filtered_pull_requests
272
272
273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 opened_by=None, user_id=None):
274 opened_by=None, user_id=None):
275 """
275 """
276 Count the number of pull requests for a specific repository that are
276 Count the number of pull requests for a specific repository that are
277 awaiting review from a specific user.
277 awaiting review from a specific user.
278
278
279 :param repo_name: target or source repo
279 :param repo_name: target or source repo
280 :param source: boolean flag to specify if repo_name refers to source
280 :param source: boolean flag to specify if repo_name refers to source
281 :param statuses: list of pull request statuses
281 :param statuses: list of pull request statuses
282 :param opened_by: author user of the pull request
282 :param opened_by: author user of the pull request
283 :param user_id: reviewer user of the pull request
283 :param user_id: reviewer user of the pull request
284 :returns: int number of pull requests
284 :returns: int number of pull requests
285 """
285 """
286 pull_requests = self.get_awaiting_my_review(
286 pull_requests = self.get_awaiting_my_review(
287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 user_id=user_id)
288 user_id=user_id)
289
289
290 return len(pull_requests)
290 return len(pull_requests)
291
291
292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 opened_by=None, user_id=None, offset=0,
293 opened_by=None, user_id=None, offset=0,
294 length=None, order_by=None, order_dir='desc'):
294 length=None, order_by=None, order_dir='desc'):
295 """
295 """
296 Get all pull requests for a specific repository that are awaiting
296 Get all pull requests for a specific repository that are awaiting
297 review from a specific user.
297 review from a specific user.
298
298
299 :param repo_name: target or source repo
299 :param repo_name: target or source repo
300 :param source: boolean flag to specify if repo_name refers to source
300 :param source: boolean flag to specify if repo_name refers to source
301 :param statuses: list of pull request statuses
301 :param statuses: list of pull request statuses
302 :param opened_by: author user of the pull request
302 :param opened_by: author user of the pull request
303 :param user_id: reviewer user of the pull request
303 :param user_id: reviewer user of the pull request
304 :param offset: pagination offset
304 :param offset: pagination offset
305 :param length: length of returned list
305 :param length: length of returned list
306 :param order_by: order of the returned list
306 :param order_by: order of the returned list
307 :param order_dir: 'asc' or 'desc' ordering direction
307 :param order_dir: 'asc' or 'desc' ordering direction
308 :returns: list of pull requests
308 :returns: list of pull requests
309 """
309 """
310 pull_requests = self.get_all(
310 pull_requests = self.get_all(
311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 order_by=order_by, order_dir=order_dir)
312 order_by=order_by, order_dir=order_dir)
313
313
314 _my = PullRequestModel().get_not_reviewed(user_id)
314 _my = PullRequestModel().get_not_reviewed(user_id)
315 my_participation = []
315 my_participation = []
316 for pr in pull_requests:
316 for pr in pull_requests:
317 if pr in _my:
317 if pr in _my:
318 my_participation.append(pr)
318 my_participation.append(pr)
319 _filtered_pull_requests = my_participation
319 _filtered_pull_requests = my_participation
320 if length:
320 if length:
321 return _filtered_pull_requests[offset:offset+length]
321 return _filtered_pull_requests[offset:offset+length]
322 else:
322 else:
323 return _filtered_pull_requests
323 return _filtered_pull_requests
324
324
325 def get_not_reviewed(self, user_id):
325 def get_not_reviewed(self, user_id):
326 return [
326 return [
327 x.pull_request for x in PullRequestReviewers.query().filter(
327 x.pull_request for x in PullRequestReviewers.query().filter(
328 PullRequestReviewers.user_id == user_id).all()
328 PullRequestReviewers.user_id == user_id).all()
329 ]
329 ]
330
330
331 def _prepare_participating_query(self, user_id=None, statuses=None,
331 def _prepare_participating_query(self, user_id=None, statuses=None,
332 order_by=None, order_dir='desc'):
332 order_by=None, order_dir='desc'):
333 q = PullRequest.query()
333 q = PullRequest.query()
334 if user_id:
334 if user_id:
335 reviewers_subquery = Session().query(
335 reviewers_subquery = Session().query(
336 PullRequestReviewers.pull_request_id).filter(
336 PullRequestReviewers.pull_request_id).filter(
337 PullRequestReviewers.user_id == user_id).subquery()
337 PullRequestReviewers.user_id == user_id).subquery()
338 user_filter = or_(
338 user_filter = or_(
339 PullRequest.user_id == user_id,
339 PullRequest.user_id == user_id,
340 PullRequest.pull_request_id.in_(reviewers_subquery)
340 PullRequest.pull_request_id.in_(reviewers_subquery)
341 )
341 )
342 q = PullRequest.query().filter(user_filter)
342 q = PullRequest.query().filter(user_filter)
343
343
344 # closed,opened
344 # closed,opened
345 if statuses:
345 if statuses:
346 q = q.filter(PullRequest.status.in_(statuses))
346 q = q.filter(PullRequest.status.in_(statuses))
347
347
348 if order_by:
348 if order_by:
349 order_map = {
349 order_map = {
350 'name_raw': PullRequest.pull_request_id,
350 'name_raw': PullRequest.pull_request_id,
351 'title': PullRequest.title,
351 'title': PullRequest.title,
352 'updated_on_raw': PullRequest.updated_on,
352 'updated_on_raw': PullRequest.updated_on,
353 'target_repo': PullRequest.target_repo_id
353 'target_repo': PullRequest.target_repo_id
354 }
354 }
355 if order_dir == 'asc':
355 if order_dir == 'asc':
356 q = q.order_by(order_map[order_by].asc())
356 q = q.order_by(order_map[order_by].asc())
357 else:
357 else:
358 q = q.order_by(order_map[order_by].desc())
358 q = q.order_by(order_map[order_by].desc())
359
359
360 return q
360 return q
361
361
362 def count_im_participating_in(self, user_id=None, statuses=None):
362 def count_im_participating_in(self, user_id=None, statuses=None):
363 q = self._prepare_participating_query(user_id, statuses=statuses)
363 q = self._prepare_participating_query(user_id, statuses=statuses)
364 return q.count()
364 return q.count()
365
365
366 def get_im_participating_in(
366 def get_im_participating_in(
367 self, user_id=None, statuses=None, offset=0,
367 self, user_id=None, statuses=None, offset=0,
368 length=None, order_by=None, order_dir='desc'):
368 length=None, order_by=None, order_dir='desc'):
369 """
369 """
370 Get all Pull requests that i'm participating in, or i have opened
370 Get all Pull requests that i'm participating in, or i have opened
371 """
371 """
372
372
373 q = self._prepare_participating_query(
373 q = self._prepare_participating_query(
374 user_id, statuses=statuses, order_by=order_by,
374 user_id, statuses=statuses, order_by=order_by,
375 order_dir=order_dir)
375 order_dir=order_dir)
376
376
377 if length:
377 if length:
378 pull_requests = q.limit(length).offset(offset).all()
378 pull_requests = q.limit(length).offset(offset).all()
379 else:
379 else:
380 pull_requests = q.all()
380 pull_requests = q.all()
381
381
382 return pull_requests
382 return pull_requests
383
383
384 def get_versions(self, pull_request):
384 def get_versions(self, pull_request):
385 """
385 """
386 returns version of pull request sorted by ID descending
386 returns version of pull request sorted by ID descending
387 """
387 """
388 return PullRequestVersion.query()\
388 return PullRequestVersion.query()\
389 .filter(PullRequestVersion.pull_request == pull_request)\
389 .filter(PullRequestVersion.pull_request == pull_request)\
390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
391 .all()
391 .all()
392
392
393 def get_pr_version(self, pull_request_id, version=None):
393 def get_pr_version(self, pull_request_id, version=None):
394 at_version = None
394 at_version = None
395
395
396 if version and version == 'latest':
396 if version and version == 'latest':
397 pull_request_ver = PullRequest.get(pull_request_id)
397 pull_request_ver = PullRequest.get(pull_request_id)
398 pull_request_obj = pull_request_ver
398 pull_request_obj = pull_request_ver
399 _org_pull_request_obj = pull_request_obj
399 _org_pull_request_obj = pull_request_obj
400 at_version = 'latest'
400 at_version = 'latest'
401 elif version:
401 elif version:
402 pull_request_ver = PullRequestVersion.get_or_404(version)
402 pull_request_ver = PullRequestVersion.get_or_404(version)
403 pull_request_obj = pull_request_ver
403 pull_request_obj = pull_request_ver
404 _org_pull_request_obj = pull_request_ver.pull_request
404 _org_pull_request_obj = pull_request_ver.pull_request
405 at_version = pull_request_ver.pull_request_version_id
405 at_version = pull_request_ver.pull_request_version_id
406 else:
406 else:
407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
408 pull_request_id)
408 pull_request_id)
409
409
410 pull_request_display_obj = PullRequest.get_pr_display_object(
410 pull_request_display_obj = PullRequest.get_pr_display_object(
411 pull_request_obj, _org_pull_request_obj)
411 pull_request_obj, _org_pull_request_obj)
412
412
413 return _org_pull_request_obj, pull_request_obj, \
413 return _org_pull_request_obj, pull_request_obj, \
414 pull_request_display_obj, at_version
414 pull_request_display_obj, at_version
415
415
416 def create(self, created_by, source_repo, source_ref, target_repo,
416 def create(self, created_by, source_repo, source_ref, target_repo,
417 target_ref, revisions, reviewers, title, description=None,
417 target_ref, revisions, reviewers, title, description=None,
418 description_renderer=None,
418 description_renderer=None,
419 reviewer_data=None, translator=None, auth_user=None):
419 reviewer_data=None, translator=None, auth_user=None):
420 translator = translator or get_current_request().translate
420 translator = translator or get_current_request().translate
421
421
422 created_by_user = self._get_user(created_by)
422 created_by_user = self._get_user(created_by)
423 auth_user = auth_user or created_by_user.AuthUser()
423 auth_user = auth_user or created_by_user.AuthUser()
424 source_repo = self._get_repo(source_repo)
424 source_repo = self._get_repo(source_repo)
425 target_repo = self._get_repo(target_repo)
425 target_repo = self._get_repo(target_repo)
426
426
427 pull_request = PullRequest()
427 pull_request = PullRequest()
428 pull_request.source_repo = source_repo
428 pull_request.source_repo = source_repo
429 pull_request.source_ref = source_ref
429 pull_request.source_ref = source_ref
430 pull_request.target_repo = target_repo
430 pull_request.target_repo = target_repo
431 pull_request.target_ref = target_ref
431 pull_request.target_ref = target_ref
432 pull_request.revisions = revisions
432 pull_request.revisions = revisions
433 pull_request.title = title
433 pull_request.title = title
434 pull_request.description = description
434 pull_request.description = description
435 pull_request.description_renderer = description_renderer
435 pull_request.description_renderer = description_renderer
436 pull_request.author = created_by_user
436 pull_request.author = created_by_user
437 pull_request.reviewer_data = reviewer_data
437 pull_request.reviewer_data = reviewer_data
438 pull_request.pull_request_state = pull_request.STATE_CREATING
438 pull_request.pull_request_state = pull_request.STATE_CREATING
439 Session().add(pull_request)
439 Session().add(pull_request)
440 Session().flush()
440 Session().flush()
441
441
442 reviewer_ids = set()
442 reviewer_ids = set()
443 # members / reviewers
443 # members / reviewers
444 for reviewer_object in reviewers:
444 for reviewer_object in reviewers:
445 user_id, reasons, mandatory, rules = reviewer_object
445 user_id, reasons, mandatory, rules = reviewer_object
446 user = self._get_user(user_id)
446 user = self._get_user(user_id)
447
447
448 # skip duplicates
448 # skip duplicates
449 if user.user_id in reviewer_ids:
449 if user.user_id in reviewer_ids:
450 continue
450 continue
451
451
452 reviewer_ids.add(user.user_id)
452 reviewer_ids.add(user.user_id)
453
453
454 reviewer = PullRequestReviewers()
454 reviewer = PullRequestReviewers()
455 reviewer.user = user
455 reviewer.user = user
456 reviewer.pull_request = pull_request
456 reviewer.pull_request = pull_request
457 reviewer.reasons = reasons
457 reviewer.reasons = reasons
458 reviewer.mandatory = mandatory
458 reviewer.mandatory = mandatory
459
459
460 # NOTE(marcink): pick only first rule for now
460 # NOTE(marcink): pick only first rule for now
461 rule_id = list(rules)[0] if rules else None
461 rule_id = list(rules)[0] if rules else None
462 rule = RepoReviewRule.get(rule_id) if rule_id else None
462 rule = RepoReviewRule.get(rule_id) if rule_id else None
463 if rule:
463 if rule:
464 review_group = rule.user_group_vote_rule(user_id)
464 review_group = rule.user_group_vote_rule(user_id)
465 # we check if this particular reviewer is member of a voting group
465 # we check if this particular reviewer is member of a voting group
466 if review_group:
466 if review_group:
467 # NOTE(marcink):
467 # NOTE(marcink):
468 # can be that user is member of more but we pick the first same,
468 # can be that user is member of more but we pick the first same,
469 # same as default reviewers algo
469 # same as default reviewers algo
470 review_group = review_group[0]
470 review_group = review_group[0]
471
471
472 rule_data = {
472 rule_data = {
473 'rule_name':
473 'rule_name':
474 rule.review_rule_name,
474 rule.review_rule_name,
475 'rule_user_group_entry_id':
475 'rule_user_group_entry_id':
476 review_group.repo_review_rule_users_group_id,
476 review_group.repo_review_rule_users_group_id,
477 'rule_user_group_name':
477 'rule_user_group_name':
478 review_group.users_group.users_group_name,
478 review_group.users_group.users_group_name,
479 'rule_user_group_members':
479 'rule_user_group_members':
480 [x.user.username for x in review_group.users_group.members],
480 [x.user.username for x in review_group.users_group.members],
481 'rule_user_group_members_id':
481 'rule_user_group_members_id':
482 [x.user.user_id for x in review_group.users_group.members],
482 [x.user.user_id for x in review_group.users_group.members],
483 }
483 }
484 # e.g {'vote_rule': -1, 'mandatory': True}
484 # e.g {'vote_rule': -1, 'mandatory': True}
485 rule_data.update(review_group.rule_data())
485 rule_data.update(review_group.rule_data())
486
486
487 reviewer.rule_data = rule_data
487 reviewer.rule_data = rule_data
488
488
489 Session().add(reviewer)
489 Session().add(reviewer)
490 Session().flush()
490 Session().flush()
491
491
492 # Set approval status to "Under Review" for all commits which are
492 # Set approval status to "Under Review" for all commits which are
493 # part of this pull request.
493 # part of this pull request.
494 ChangesetStatusModel().set_status(
494 ChangesetStatusModel().set_status(
495 repo=target_repo,
495 repo=target_repo,
496 status=ChangesetStatus.STATUS_UNDER_REVIEW,
496 status=ChangesetStatus.STATUS_UNDER_REVIEW,
497 user=created_by_user,
497 user=created_by_user,
498 pull_request=pull_request
498 pull_request=pull_request
499 )
499 )
500 # we commit early at this point. This has to do with a fact
500 # we commit early at this point. This has to do with a fact
501 # that before queries do some row-locking. And because of that
501 # that before queries do some row-locking. And because of that
502 # we need to commit and finish transaction before below validate call
502 # we need to commit and finish transaction before below validate call
503 # that for large repos could be long resulting in long row locks
503 # that for large repos could be long resulting in long row locks
504 Session().commit()
504 Session().commit()
505
505
506 # prepare workspace, and run initial merge simulation. Set state during that
506 # prepare workspace, and run initial merge simulation. Set state during that
507 # operation
507 # operation
508 pull_request = PullRequest.get(pull_request.pull_request_id)
508 pull_request = PullRequest.get(pull_request.pull_request_id)
509
509
510 # set as merging, for simulation, and if finished to created so we mark
510 # set as merging, for simulation, and if finished to created so we mark
511 # simulation is working fine
511 # simulation is working fine
512 with pull_request.set_state(PullRequest.STATE_MERGING,
512 with pull_request.set_state(PullRequest.STATE_MERGING,
513 final_state=PullRequest.STATE_CREATED):
513 final_state=PullRequest.STATE_CREATED):
514 MergeCheck.validate(
514 MergeCheck.validate(
515 pull_request, auth_user=auth_user, translator=translator)
515 pull_request, auth_user=auth_user, translator=translator)
516
516
517 self.notify_reviewers(pull_request, reviewer_ids)
517 self.notify_reviewers(pull_request, reviewer_ids)
518 self.trigger_pull_request_hook(
518 self.trigger_pull_request_hook(
519 pull_request, created_by_user, 'create')
519 pull_request, created_by_user, 'create')
520
520
521 creation_data = pull_request.get_api_data(with_merge_state=False)
521 creation_data = pull_request.get_api_data(with_merge_state=False)
522 self._log_audit_action(
522 self._log_audit_action(
523 'repo.pull_request.create', {'data': creation_data},
523 'repo.pull_request.create', {'data': creation_data},
524 auth_user, pull_request)
524 auth_user, pull_request)
525
525
526 return pull_request
526 return pull_request
527
527
528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 pull_request = self.__get_pull_request(pull_request)
529 pull_request = self.__get_pull_request(pull_request)
530 target_scm = pull_request.target_repo.scm_instance()
530 target_scm = pull_request.target_repo.scm_instance()
531 if action == 'create':
531 if action == 'create':
532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 elif action == 'merge':
533 elif action == 'merge':
534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 elif action == 'close':
535 elif action == 'close':
536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 elif action == 'review_status_change':
537 elif action == 'review_status_change':
538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 elif action == 'update':
539 elif action == 'update':
540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 elif action == 'comment':
541 elif action == 'comment':
542 # dummy hook ! for comment. We want this function to handle all cases
542 # dummy hook ! for comment. We want this function to handle all cases
543 def trigger_hook(*args, **kwargs):
543 def trigger_hook(*args, **kwargs):
544 pass
544 pass
545 comment = data['comment']
545 comment = data['comment']
546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 else:
547 else:
548 return
548 return
549
549
550 trigger_hook(
550 trigger_hook(
551 username=user.username,
551 username=user.username,
552 repo_name=pull_request.target_repo.repo_name,
552 repo_name=pull_request.target_repo.repo_name,
553 repo_alias=target_scm.alias,
553 repo_alias=target_scm.alias,
554 pull_request=pull_request,
554 pull_request=pull_request,
555 data=data)
555 data=data)
556
556
557 def _get_commit_ids(self, pull_request):
557 def _get_commit_ids(self, pull_request):
558 """
558 """
559 Return the commit ids of the merged pull request.
559 Return the commit ids of the merged pull request.
560
560
561 This method is not dealing correctly yet with the lack of autoupdates
561 This method is not dealing correctly yet with the lack of autoupdates
562 nor with the implicit target updates.
562 nor with the implicit target updates.
563 For example: if a commit in the source repo is already in the target it
563 For example: if a commit in the source repo is already in the target it
564 will be reported anyways.
564 will be reported anyways.
565 """
565 """
566 merge_rev = pull_request.merge_rev
566 merge_rev = pull_request.merge_rev
567 if merge_rev is None:
567 if merge_rev is None:
568 raise ValueError('This pull request was not merged yet')
568 raise ValueError('This pull request was not merged yet')
569
569
570 commit_ids = list(pull_request.revisions)
570 commit_ids = list(pull_request.revisions)
571 if merge_rev not in commit_ids:
571 if merge_rev not in commit_ids:
572 commit_ids.append(merge_rev)
572 commit_ids.append(merge_rev)
573
573
574 return commit_ids
574 return commit_ids
575
575
576 def merge_repo(self, pull_request, user, extras):
576 def merge_repo(self, pull_request, user, extras):
577 log.debug("Merging pull request %s", pull_request.pull_request_id)
577 log.debug("Merging pull request %s", pull_request.pull_request_id)
578 extras['user_agent'] = 'internal-merge'
578 extras['user_agent'] = 'internal-merge'
579 merge_state = self._merge_pull_request(pull_request, user, extras)
579 merge_state = self._merge_pull_request(pull_request, user, extras)
580 if merge_state.executed:
580 if merge_state.executed:
581 log.debug("Merge was successful, updating the pull request comments.")
581 log.debug("Merge was successful, updating the pull request comments.")
582 self._comment_and_close_pr(pull_request, user, merge_state)
582 self._comment_and_close_pr(pull_request, user, merge_state)
583
583
584 self._log_audit_action(
584 self._log_audit_action(
585 'repo.pull_request.merge',
585 'repo.pull_request.merge',
586 {'merge_state': merge_state.__dict__},
586 {'merge_state': merge_state.__dict__},
587 user, pull_request)
587 user, pull_request)
588
588
589 else:
589 else:
590 log.warn("Merge failed, not updating the pull request.")
590 log.warn("Merge failed, not updating the pull request.")
591 return merge_state
591 return merge_state
592
592
593 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
593 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
594 target_vcs = pull_request.target_repo.scm_instance()
594 target_vcs = pull_request.target_repo.scm_instance()
595 source_vcs = pull_request.source_repo.scm_instance()
595 source_vcs = pull_request.source_repo.scm_instance()
596
596
597 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
597 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
598 pr_id=pull_request.pull_request_id,
598 pr_id=pull_request.pull_request_id,
599 pr_title=pull_request.title,
599 pr_title=pull_request.title,
600 source_repo=source_vcs.name,
600 source_repo=source_vcs.name,
601 source_ref_name=pull_request.source_ref_parts.name,
601 source_ref_name=pull_request.source_ref_parts.name,
602 target_repo=target_vcs.name,
602 target_repo=target_vcs.name,
603 target_ref_name=pull_request.target_ref_parts.name,
603 target_ref_name=pull_request.target_ref_parts.name,
604 )
604 )
605
605
606 workspace_id = self._workspace_id(pull_request)
606 workspace_id = self._workspace_id(pull_request)
607 repo_id = pull_request.target_repo.repo_id
607 repo_id = pull_request.target_repo.repo_id
608 use_rebase = self._use_rebase_for_merging(pull_request)
608 use_rebase = self._use_rebase_for_merging(pull_request)
609 close_branch = self._close_branch_before_merging(pull_request)
609 close_branch = self._close_branch_before_merging(pull_request)
610
610
611 target_ref = self._refresh_reference(
611 target_ref = self._refresh_reference(
612 pull_request.target_ref_parts, target_vcs)
612 pull_request.target_ref_parts, target_vcs)
613
613
614 callback_daemon, extras = prepare_callback_daemon(
614 callback_daemon, extras = prepare_callback_daemon(
615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
616 host=vcs_settings.HOOKS_HOST,
616 host=vcs_settings.HOOKS_HOST,
617 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
617 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
618
618
619 with callback_daemon:
619 with callback_daemon:
620 # TODO: johbo: Implement a clean way to run a config_override
620 # TODO: johbo: Implement a clean way to run a config_override
621 # for a single call.
621 # for a single call.
622 target_vcs.config.set(
622 target_vcs.config.set(
623 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
623 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
624
624
625 user_name = user.short_contact
625 user_name = user.short_contact
626 merge_state = target_vcs.merge(
626 merge_state = target_vcs.merge(
627 repo_id, workspace_id, target_ref, source_vcs,
627 repo_id, workspace_id, target_ref, source_vcs,
628 pull_request.source_ref_parts,
628 pull_request.source_ref_parts,
629 user_name=user_name, user_email=user.email,
629 user_name=user_name, user_email=user.email,
630 message=message, use_rebase=use_rebase,
630 message=message, use_rebase=use_rebase,
631 close_branch=close_branch)
631 close_branch=close_branch)
632 return merge_state
632 return merge_state
633
633
634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
635 pull_request.merge_rev = merge_state.merge_ref.commit_id
635 pull_request.merge_rev = merge_state.merge_ref.commit_id
636 pull_request.updated_on = datetime.datetime.now()
636 pull_request.updated_on = datetime.datetime.now()
637 close_msg = close_msg or 'Pull request merged and closed'
637 close_msg = close_msg or 'Pull request merged and closed'
638
638
639 CommentsModel().create(
639 CommentsModel().create(
640 text=safe_unicode(close_msg),
640 text=safe_unicode(close_msg),
641 repo=pull_request.target_repo.repo_id,
641 repo=pull_request.target_repo.repo_id,
642 user=user.user_id,
642 user=user.user_id,
643 pull_request=pull_request.pull_request_id,
643 pull_request=pull_request.pull_request_id,
644 f_path=None,
644 f_path=None,
645 line_no=None,
645 line_no=None,
646 closing_pr=True
646 closing_pr=True
647 )
647 )
648
648
649 Session().add(pull_request)
649 Session().add(pull_request)
650 Session().flush()
650 Session().flush()
651 # TODO: paris: replace invalidation with less radical solution
651 # TODO: paris: replace invalidation with less radical solution
652 ScmModel().mark_for_invalidation(
652 ScmModel().mark_for_invalidation(
653 pull_request.target_repo.repo_name)
653 pull_request.target_repo.repo_name)
654 self.trigger_pull_request_hook(pull_request, user, 'merge')
654 self.trigger_pull_request_hook(pull_request, user, 'merge')
655
655
656 def has_valid_update_type(self, pull_request):
656 def has_valid_update_type(self, pull_request):
657 source_ref_type = pull_request.source_ref_parts.type
657 source_ref_type = pull_request.source_ref_parts.type
658 return source_ref_type in self.REF_TYPES
658 return source_ref_type in self.REF_TYPES
659
659
660 def update_commits(self, pull_request):
660 def update_commits(self, pull_request):
661 """
661 """
662 Get the updated list of commits for the pull request
662 Get the updated list of commits for the pull request
663 and return the new pull request version and the list
663 and return the new pull request version and the list
664 of commits processed by this update action
664 of commits processed by this update action
665 """
665 """
666 pull_request = self.__get_pull_request(pull_request)
666 pull_request = self.__get_pull_request(pull_request)
667 source_ref_type = pull_request.source_ref_parts.type
667 source_ref_type = pull_request.source_ref_parts.type
668 source_ref_name = pull_request.source_ref_parts.name
668 source_ref_name = pull_request.source_ref_parts.name
669 source_ref_id = pull_request.source_ref_parts.commit_id
669 source_ref_id = pull_request.source_ref_parts.commit_id
670
670
671 target_ref_type = pull_request.target_ref_parts.type
671 target_ref_type = pull_request.target_ref_parts.type
672 target_ref_name = pull_request.target_ref_parts.name
672 target_ref_name = pull_request.target_ref_parts.name
673 target_ref_id = pull_request.target_ref_parts.commit_id
673 target_ref_id = pull_request.target_ref_parts.commit_id
674
674
675 if not self.has_valid_update_type(pull_request):
675 if not self.has_valid_update_type(pull_request):
676 log.debug("Skipping update of pull request %s due to ref type: %s",
676 log.debug("Skipping update of pull request %s due to ref type: %s",
677 pull_request, source_ref_type)
677 pull_request, source_ref_type)
678 return UpdateResponse(
678 return UpdateResponse(
679 executed=False,
679 executed=False,
680 reason=UpdateFailureReason.WRONG_REF_TYPE,
680 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 old=pull_request, new=None, changes=None,
681 old=pull_request, new=None, changes=None,
682 source_changed=False, target_changed=False)
682 source_changed=False, target_changed=False)
683
683
684 # source repo
684 # source repo
685 source_repo = pull_request.source_repo.scm_instance()
685 source_repo = pull_request.source_repo.scm_instance()
686 source_repo.count() # cache rebuild
687
686 try:
688 try:
687 source_commit = source_repo.get_commit(commit_id=source_ref_name)
689 source_commit = source_repo.get_commit(commit_id=source_ref_name)
688 except CommitDoesNotExistError:
690 except CommitDoesNotExistError:
689 return UpdateResponse(
691 return UpdateResponse(
690 executed=False,
692 executed=False,
691 reason=UpdateFailureReason.MISSING_SOURCE_REF,
693 reason=UpdateFailureReason.MISSING_SOURCE_REF,
692 old=pull_request, new=None, changes=None,
694 old=pull_request, new=None, changes=None,
693 source_changed=False, target_changed=False)
695 source_changed=False, target_changed=False)
694
696
695 source_changed = source_ref_id != source_commit.raw_id
697 source_changed = source_ref_id != source_commit.raw_id
696
698
697 # target repo
699 # target repo
698 target_repo = pull_request.target_repo.scm_instance()
700 target_repo = pull_request.target_repo.scm_instance()
701 target_repo.count() # cache rebuild
702
699 try:
703 try:
700 target_commit = target_repo.get_commit(commit_id=target_ref_name)
704 target_commit = target_repo.get_commit(commit_id=target_ref_name)
701 except CommitDoesNotExistError:
705 except CommitDoesNotExistError:
702 return UpdateResponse(
706 return UpdateResponse(
703 executed=False,
707 executed=False,
704 reason=UpdateFailureReason.MISSING_TARGET_REF,
708 reason=UpdateFailureReason.MISSING_TARGET_REF,
705 old=pull_request, new=None, changes=None,
709 old=pull_request, new=None, changes=None,
706 source_changed=False, target_changed=False)
710 source_changed=False, target_changed=False)
707 target_changed = target_ref_id != target_commit.raw_id
711 target_changed = target_ref_id != target_commit.raw_id
708
712
709 if not (source_changed or target_changed):
713 if not (source_changed or target_changed):
710 log.debug("Nothing changed in pull request %s", pull_request)
714 log.debug("Nothing changed in pull request %s", pull_request)
711 return UpdateResponse(
715 return UpdateResponse(
712 executed=False,
716 executed=False,
713 reason=UpdateFailureReason.NO_CHANGE,
717 reason=UpdateFailureReason.NO_CHANGE,
714 old=pull_request, new=None, changes=None,
718 old=pull_request, new=None, changes=None,
715 source_changed=target_changed, target_changed=source_changed)
719 source_changed=target_changed, target_changed=source_changed)
716
720
717 change_in_found = 'target repo' if target_changed else 'source repo'
721 change_in_found = 'target repo' if target_changed else 'source repo'
718 log.debug('Updating pull request because of change in %s detected',
722 log.debug('Updating pull request because of change in %s detected',
719 change_in_found)
723 change_in_found)
720
724
721 # Finally there is a need for an update, in case of source change
725 # Finally there is a need for an update, in case of source change
722 # we create a new version, else just an update
726 # we create a new version, else just an update
723 if source_changed:
727 if source_changed:
724 pull_request_version = self._create_version_from_snapshot(pull_request)
728 pull_request_version = self._create_version_from_snapshot(pull_request)
725 self._link_comments_to_version(pull_request_version)
729 self._link_comments_to_version(pull_request_version)
726 else:
730 else:
727 try:
731 try:
728 ver = pull_request.versions[-1]
732 ver = pull_request.versions[-1]
729 except IndexError:
733 except IndexError:
730 ver = None
734 ver = None
731
735
732 pull_request.pull_request_version_id = \
736 pull_request.pull_request_version_id = \
733 ver.pull_request_version_id if ver else None
737 ver.pull_request_version_id if ver else None
734 pull_request_version = pull_request
738 pull_request_version = pull_request
735
739
736 try:
740 try:
737 if target_ref_type in self.REF_TYPES:
741 if target_ref_type in self.REF_TYPES:
738 target_commit = target_repo.get_commit(target_ref_name)
742 target_commit = target_repo.get_commit(target_ref_name)
739 else:
743 else:
740 target_commit = target_repo.get_commit(target_ref_id)
744 target_commit = target_repo.get_commit(target_ref_id)
741 except CommitDoesNotExistError:
745 except CommitDoesNotExistError:
742 return UpdateResponse(
746 return UpdateResponse(
743 executed=False,
747 executed=False,
744 reason=UpdateFailureReason.MISSING_TARGET_REF,
748 reason=UpdateFailureReason.MISSING_TARGET_REF,
745 old=pull_request, new=None, changes=None,
749 old=pull_request, new=None, changes=None,
746 source_changed=source_changed, target_changed=target_changed)
750 source_changed=source_changed, target_changed=target_changed)
747
751
748 # re-compute commit ids
752 # re-compute commit ids
749 old_commit_ids = pull_request.revisions
753 old_commit_ids = pull_request.revisions
750 pre_load = ["author", "branch", "date", "message"]
754 pre_load = ["author", "branch", "date", "message"]
751 commit_ranges = target_repo.compare(
755 commit_ranges = target_repo.compare(
752 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
756 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
753 pre_load=pre_load)
757 pre_load=pre_load)
754
758
755 ancestor = source_repo.get_common_ancestor(
759 ancestor = source_repo.get_common_ancestor(
756 source_commit.raw_id, target_commit.raw_id, target_repo)
760 source_commit.raw_id, target_commit.raw_id, target_repo)
757
761
758 pull_request.source_ref = '%s:%s:%s' % (
762 pull_request.source_ref = '%s:%s:%s' % (
759 source_ref_type, source_ref_name, source_commit.raw_id)
763 source_ref_type, source_ref_name, source_commit.raw_id)
760 pull_request.target_ref = '%s:%s:%s' % (
764 pull_request.target_ref = '%s:%s:%s' % (
761 target_ref_type, target_ref_name, ancestor)
765 target_ref_type, target_ref_name, ancestor)
762
766
763 pull_request.revisions = [
767 pull_request.revisions = [
764 commit.raw_id for commit in reversed(commit_ranges)]
768 commit.raw_id for commit in reversed(commit_ranges)]
765 pull_request.updated_on = datetime.datetime.now()
769 pull_request.updated_on = datetime.datetime.now()
766 Session().add(pull_request)
770 Session().add(pull_request)
767 new_commit_ids = pull_request.revisions
771 new_commit_ids = pull_request.revisions
768
772
769 old_diff_data, new_diff_data = self._generate_update_diffs(
773 old_diff_data, new_diff_data = self._generate_update_diffs(
770 pull_request, pull_request_version)
774 pull_request, pull_request_version)
771
775
772 # calculate commit and file changes
776 # calculate commit and file changes
773 changes = self._calculate_commit_id_changes(
777 changes = self._calculate_commit_id_changes(
774 old_commit_ids, new_commit_ids)
778 old_commit_ids, new_commit_ids)
775 file_changes = self._calculate_file_changes(
779 file_changes = self._calculate_file_changes(
776 old_diff_data, new_diff_data)
780 old_diff_data, new_diff_data)
777
781
778 # set comments as outdated if DIFFS changed
782 # set comments as outdated if DIFFS changed
779 CommentsModel().outdate_comments(
783 CommentsModel().outdate_comments(
780 pull_request, old_diff_data=old_diff_data,
784 pull_request, old_diff_data=old_diff_data,
781 new_diff_data=new_diff_data)
785 new_diff_data=new_diff_data)
782
786
783 commit_changes = (changes.added or changes.removed)
787 commit_changes = (changes.added or changes.removed)
784 file_node_changes = (
788 file_node_changes = (
785 file_changes.added or file_changes.modified or file_changes.removed)
789 file_changes.added or file_changes.modified or file_changes.removed)
786 pr_has_changes = commit_changes or file_node_changes
790 pr_has_changes = commit_changes or file_node_changes
787
791
788 # Add an automatic comment to the pull request, in case
792 # Add an automatic comment to the pull request, in case
789 # anything has changed
793 # anything has changed
790 if pr_has_changes:
794 if pr_has_changes:
791 update_comment = CommentsModel().create(
795 update_comment = CommentsModel().create(
792 text=self._render_update_message(changes, file_changes),
796 text=self._render_update_message(changes, file_changes),
793 repo=pull_request.target_repo,
797 repo=pull_request.target_repo,
794 user=pull_request.author,
798 user=pull_request.author,
795 pull_request=pull_request,
799 pull_request=pull_request,
796 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
800 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
797
801
798 # Update status to "Under Review" for added commits
802 # Update status to "Under Review" for added commits
799 for commit_id in changes.added:
803 for commit_id in changes.added:
800 ChangesetStatusModel().set_status(
804 ChangesetStatusModel().set_status(
801 repo=pull_request.source_repo,
805 repo=pull_request.source_repo,
802 status=ChangesetStatus.STATUS_UNDER_REVIEW,
806 status=ChangesetStatus.STATUS_UNDER_REVIEW,
803 comment=update_comment,
807 comment=update_comment,
804 user=pull_request.author,
808 user=pull_request.author,
805 pull_request=pull_request,
809 pull_request=pull_request,
806 revision=commit_id)
810 revision=commit_id)
807
811
808 log.debug(
812 log.debug(
809 'Updated pull request %s, added_ids: %s, common_ids: %s, '
813 'Updated pull request %s, added_ids: %s, common_ids: %s, '
810 'removed_ids: %s', pull_request.pull_request_id,
814 'removed_ids: %s', pull_request.pull_request_id,
811 changes.added, changes.common, changes.removed)
815 changes.added, changes.common, changes.removed)
812 log.debug(
816 log.debug(
813 'Updated pull request with the following file changes: %s',
817 'Updated pull request with the following file changes: %s',
814 file_changes)
818 file_changes)
815
819
816 log.info(
820 log.info(
817 "Updated pull request %s from commit %s to commit %s, "
821 "Updated pull request %s from commit %s to commit %s, "
818 "stored new version %s of this pull request.",
822 "stored new version %s of this pull request.",
819 pull_request.pull_request_id, source_ref_id,
823 pull_request.pull_request_id, source_ref_id,
820 pull_request.source_ref_parts.commit_id,
824 pull_request.source_ref_parts.commit_id,
821 pull_request_version.pull_request_version_id)
825 pull_request_version.pull_request_version_id)
822 Session().commit()
826 Session().commit()
823 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
827 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
824
828
825 return UpdateResponse(
829 return UpdateResponse(
826 executed=True, reason=UpdateFailureReason.NONE,
830 executed=True, reason=UpdateFailureReason.NONE,
827 old=pull_request, new=pull_request_version, changes=changes,
831 old=pull_request, new=pull_request_version, changes=changes,
828 source_changed=source_changed, target_changed=target_changed)
832 source_changed=source_changed, target_changed=target_changed)
829
833
830 def _create_version_from_snapshot(self, pull_request):
834 def _create_version_from_snapshot(self, pull_request):
831 version = PullRequestVersion()
835 version = PullRequestVersion()
832 version.title = pull_request.title
836 version.title = pull_request.title
833 version.description = pull_request.description
837 version.description = pull_request.description
834 version.status = pull_request.status
838 version.status = pull_request.status
835 version.pull_request_state = pull_request.pull_request_state
839 version.pull_request_state = pull_request.pull_request_state
836 version.created_on = datetime.datetime.now()
840 version.created_on = datetime.datetime.now()
837 version.updated_on = pull_request.updated_on
841 version.updated_on = pull_request.updated_on
838 version.user_id = pull_request.user_id
842 version.user_id = pull_request.user_id
839 version.source_repo = pull_request.source_repo
843 version.source_repo = pull_request.source_repo
840 version.source_ref = pull_request.source_ref
844 version.source_ref = pull_request.source_ref
841 version.target_repo = pull_request.target_repo
845 version.target_repo = pull_request.target_repo
842 version.target_ref = pull_request.target_ref
846 version.target_ref = pull_request.target_ref
843
847
844 version._last_merge_source_rev = pull_request._last_merge_source_rev
848 version._last_merge_source_rev = pull_request._last_merge_source_rev
845 version._last_merge_target_rev = pull_request._last_merge_target_rev
849 version._last_merge_target_rev = pull_request._last_merge_target_rev
846 version.last_merge_status = pull_request.last_merge_status
850 version.last_merge_status = pull_request.last_merge_status
847 version.shadow_merge_ref = pull_request.shadow_merge_ref
851 version.shadow_merge_ref = pull_request.shadow_merge_ref
848 version.merge_rev = pull_request.merge_rev
852 version.merge_rev = pull_request.merge_rev
849 version.reviewer_data = pull_request.reviewer_data
853 version.reviewer_data = pull_request.reviewer_data
850
854
851 version.revisions = pull_request.revisions
855 version.revisions = pull_request.revisions
852 version.pull_request = pull_request
856 version.pull_request = pull_request
853 Session().add(version)
857 Session().add(version)
854 Session().flush()
858 Session().flush()
855
859
856 return version
860 return version
857
861
858 def _generate_update_diffs(self, pull_request, pull_request_version):
862 def _generate_update_diffs(self, pull_request, pull_request_version):
859
863
860 diff_context = (
864 diff_context = (
861 self.DIFF_CONTEXT +
865 self.DIFF_CONTEXT +
862 CommentsModel.needed_extra_diff_context())
866 CommentsModel.needed_extra_diff_context())
863 hide_whitespace_changes = False
867 hide_whitespace_changes = False
864 source_repo = pull_request_version.source_repo
868 source_repo = pull_request_version.source_repo
865 source_ref_id = pull_request_version.source_ref_parts.commit_id
869 source_ref_id = pull_request_version.source_ref_parts.commit_id
866 target_ref_id = pull_request_version.target_ref_parts.commit_id
870 target_ref_id = pull_request_version.target_ref_parts.commit_id
867 old_diff = self._get_diff_from_pr_or_version(
871 old_diff = self._get_diff_from_pr_or_version(
868 source_repo, source_ref_id, target_ref_id,
872 source_repo, source_ref_id, target_ref_id,
869 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
873 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
870
874
871 source_repo = pull_request.source_repo
875 source_repo = pull_request.source_repo
872 source_ref_id = pull_request.source_ref_parts.commit_id
876 source_ref_id = pull_request.source_ref_parts.commit_id
873 target_ref_id = pull_request.target_ref_parts.commit_id
877 target_ref_id = pull_request.target_ref_parts.commit_id
874
878
875 new_diff = self._get_diff_from_pr_or_version(
879 new_diff = self._get_diff_from_pr_or_version(
876 source_repo, source_ref_id, target_ref_id,
880 source_repo, source_ref_id, target_ref_id,
877 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
881 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
878
882
879 old_diff_data = diffs.DiffProcessor(old_diff)
883 old_diff_data = diffs.DiffProcessor(old_diff)
880 old_diff_data.prepare()
884 old_diff_data.prepare()
881 new_diff_data = diffs.DiffProcessor(new_diff)
885 new_diff_data = diffs.DiffProcessor(new_diff)
882 new_diff_data.prepare()
886 new_diff_data.prepare()
883
887
884 return old_diff_data, new_diff_data
888 return old_diff_data, new_diff_data
885
889
886 def _link_comments_to_version(self, pull_request_version):
890 def _link_comments_to_version(self, pull_request_version):
887 """
891 """
888 Link all unlinked comments of this pull request to the given version.
892 Link all unlinked comments of this pull request to the given version.
889
893
890 :param pull_request_version: The `PullRequestVersion` to which
894 :param pull_request_version: The `PullRequestVersion` to which
891 the comments shall be linked.
895 the comments shall be linked.
892
896
893 """
897 """
894 pull_request = pull_request_version.pull_request
898 pull_request = pull_request_version.pull_request
895 comments = ChangesetComment.query()\
899 comments = ChangesetComment.query()\
896 .filter(
900 .filter(
897 # TODO: johbo: Should we query for the repo at all here?
901 # TODO: johbo: Should we query for the repo at all here?
898 # Pending decision on how comments of PRs are to be related
902 # Pending decision on how comments of PRs are to be related
899 # to either the source repo, the target repo or no repo at all.
903 # to either the source repo, the target repo or no repo at all.
900 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
904 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
901 ChangesetComment.pull_request == pull_request,
905 ChangesetComment.pull_request == pull_request,
902 ChangesetComment.pull_request_version == None)\
906 ChangesetComment.pull_request_version == None)\
903 .order_by(ChangesetComment.comment_id.asc())
907 .order_by(ChangesetComment.comment_id.asc())
904
908
905 # TODO: johbo: Find out why this breaks if it is done in a bulk
909 # TODO: johbo: Find out why this breaks if it is done in a bulk
906 # operation.
910 # operation.
907 for comment in comments:
911 for comment in comments:
908 comment.pull_request_version_id = (
912 comment.pull_request_version_id = (
909 pull_request_version.pull_request_version_id)
913 pull_request_version.pull_request_version_id)
910 Session().add(comment)
914 Session().add(comment)
911
915
912 def _calculate_commit_id_changes(self, old_ids, new_ids):
916 def _calculate_commit_id_changes(self, old_ids, new_ids):
913 added = [x for x in new_ids if x not in old_ids]
917 added = [x for x in new_ids if x not in old_ids]
914 common = [x for x in new_ids if x in old_ids]
918 common = [x for x in new_ids if x in old_ids]
915 removed = [x for x in old_ids if x not in new_ids]
919 removed = [x for x in old_ids if x not in new_ids]
916 total = new_ids
920 total = new_ids
917 return ChangeTuple(added, common, removed, total)
921 return ChangeTuple(added, common, removed, total)
918
922
919 def _calculate_file_changes(self, old_diff_data, new_diff_data):
923 def _calculate_file_changes(self, old_diff_data, new_diff_data):
920
924
921 old_files = OrderedDict()
925 old_files = OrderedDict()
922 for diff_data in old_diff_data.parsed_diff:
926 for diff_data in old_diff_data.parsed_diff:
923 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
927 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
924
928
925 added_files = []
929 added_files = []
926 modified_files = []
930 modified_files = []
927 removed_files = []
931 removed_files = []
928 for diff_data in new_diff_data.parsed_diff:
932 for diff_data in new_diff_data.parsed_diff:
929 new_filename = diff_data['filename']
933 new_filename = diff_data['filename']
930 new_hash = md5_safe(diff_data['raw_diff'])
934 new_hash = md5_safe(diff_data['raw_diff'])
931
935
932 old_hash = old_files.get(new_filename)
936 old_hash = old_files.get(new_filename)
933 if not old_hash:
937 if not old_hash:
934 # file is not present in old diff, means it's added
938 # file is not present in old diff, means it's added
935 added_files.append(new_filename)
939 added_files.append(new_filename)
936 else:
940 else:
937 if new_hash != old_hash:
941 if new_hash != old_hash:
938 modified_files.append(new_filename)
942 modified_files.append(new_filename)
939 # now remove a file from old, since we have seen it already
943 # now remove a file from old, since we have seen it already
940 del old_files[new_filename]
944 del old_files[new_filename]
941
945
942 # removed files is when there are present in old, but not in NEW,
946 # removed files is when there are present in old, but not in NEW,
943 # since we remove old files that are present in new diff, left-overs
947 # since we remove old files that are present in new diff, left-overs
944 # if any should be the removed files
948 # if any should be the removed files
945 removed_files.extend(old_files.keys())
949 removed_files.extend(old_files.keys())
946
950
947 return FileChangeTuple(added_files, modified_files, removed_files)
951 return FileChangeTuple(added_files, modified_files, removed_files)
948
952
949 def _render_update_message(self, changes, file_changes):
953 def _render_update_message(self, changes, file_changes):
950 """
954 """
951 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
955 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
952 so it's always looking the same disregarding on which default
956 so it's always looking the same disregarding on which default
953 renderer system is using.
957 renderer system is using.
954
958
955 :param changes: changes named tuple
959 :param changes: changes named tuple
956 :param file_changes: file changes named tuple
960 :param file_changes: file changes named tuple
957
961
958 """
962 """
959 new_status = ChangesetStatus.get_status_lbl(
963 new_status = ChangesetStatus.get_status_lbl(
960 ChangesetStatus.STATUS_UNDER_REVIEW)
964 ChangesetStatus.STATUS_UNDER_REVIEW)
961
965
962 changed_files = (
966 changed_files = (
963 file_changes.added + file_changes.modified + file_changes.removed)
967 file_changes.added + file_changes.modified + file_changes.removed)
964
968
965 params = {
969 params = {
966 'under_review_label': new_status,
970 'under_review_label': new_status,
967 'added_commits': changes.added,
971 'added_commits': changes.added,
968 'removed_commits': changes.removed,
972 'removed_commits': changes.removed,
969 'changed_files': changed_files,
973 'changed_files': changed_files,
970 'added_files': file_changes.added,
974 'added_files': file_changes.added,
971 'modified_files': file_changes.modified,
975 'modified_files': file_changes.modified,
972 'removed_files': file_changes.removed,
976 'removed_files': file_changes.removed,
973 }
977 }
974 renderer = RstTemplateRenderer()
978 renderer = RstTemplateRenderer()
975 return renderer.render('pull_request_update.mako', **params)
979 return renderer.render('pull_request_update.mako', **params)
976
980
977 def edit(self, pull_request, title, description, description_renderer, user):
981 def edit(self, pull_request, title, description, description_renderer, user):
978 pull_request = self.__get_pull_request(pull_request)
982 pull_request = self.__get_pull_request(pull_request)
979 old_data = pull_request.get_api_data(with_merge_state=False)
983 old_data = pull_request.get_api_data(with_merge_state=False)
980 if pull_request.is_closed():
984 if pull_request.is_closed():
981 raise ValueError('This pull request is closed')
985 raise ValueError('This pull request is closed')
982 if title:
986 if title:
983 pull_request.title = title
987 pull_request.title = title
984 pull_request.description = description
988 pull_request.description = description
985 pull_request.updated_on = datetime.datetime.now()
989 pull_request.updated_on = datetime.datetime.now()
986 pull_request.description_renderer = description_renderer
990 pull_request.description_renderer = description_renderer
987 Session().add(pull_request)
991 Session().add(pull_request)
988 self._log_audit_action(
992 self._log_audit_action(
989 'repo.pull_request.edit', {'old_data': old_data},
993 'repo.pull_request.edit', {'old_data': old_data},
990 user, pull_request)
994 user, pull_request)
991
995
992 def update_reviewers(self, pull_request, reviewer_data, user):
996 def update_reviewers(self, pull_request, reviewer_data, user):
993 """
997 """
994 Update the reviewers in the pull request
998 Update the reviewers in the pull request
995
999
996 :param pull_request: the pr to update
1000 :param pull_request: the pr to update
997 :param reviewer_data: list of tuples
1001 :param reviewer_data: list of tuples
998 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1002 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
999 """
1003 """
1000 pull_request = self.__get_pull_request(pull_request)
1004 pull_request = self.__get_pull_request(pull_request)
1001 if pull_request.is_closed():
1005 if pull_request.is_closed():
1002 raise ValueError('This pull request is closed')
1006 raise ValueError('This pull request is closed')
1003
1007
1004 reviewers = {}
1008 reviewers = {}
1005 for user_id, reasons, mandatory, rules in reviewer_data:
1009 for user_id, reasons, mandatory, rules in reviewer_data:
1006 if isinstance(user_id, (int, compat.string_types)):
1010 if isinstance(user_id, (int, compat.string_types)):
1007 user_id = self._get_user(user_id).user_id
1011 user_id = self._get_user(user_id).user_id
1008 reviewers[user_id] = {
1012 reviewers[user_id] = {
1009 'reasons': reasons, 'mandatory': mandatory}
1013 'reasons': reasons, 'mandatory': mandatory}
1010
1014
1011 reviewers_ids = set(reviewers.keys())
1015 reviewers_ids = set(reviewers.keys())
1012 current_reviewers = PullRequestReviewers.query()\
1016 current_reviewers = PullRequestReviewers.query()\
1013 .filter(PullRequestReviewers.pull_request ==
1017 .filter(PullRequestReviewers.pull_request ==
1014 pull_request).all()
1018 pull_request).all()
1015 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1019 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1016
1020
1017 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1021 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1018 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1022 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1019
1023
1020 log.debug("Adding %s reviewers", ids_to_add)
1024 log.debug("Adding %s reviewers", ids_to_add)
1021 log.debug("Removing %s reviewers", ids_to_remove)
1025 log.debug("Removing %s reviewers", ids_to_remove)
1022 changed = False
1026 changed = False
1023 added_audit_reviewers = []
1027 added_audit_reviewers = []
1024 removed_audit_reviewers = []
1028 removed_audit_reviewers = []
1025
1029
1026 for uid in ids_to_add:
1030 for uid in ids_to_add:
1027 changed = True
1031 changed = True
1028 _usr = self._get_user(uid)
1032 _usr = self._get_user(uid)
1029 reviewer = PullRequestReviewers()
1033 reviewer = PullRequestReviewers()
1030 reviewer.user = _usr
1034 reviewer.user = _usr
1031 reviewer.pull_request = pull_request
1035 reviewer.pull_request = pull_request
1032 reviewer.reasons = reviewers[uid]['reasons']
1036 reviewer.reasons = reviewers[uid]['reasons']
1033 # NOTE(marcink): mandatory shouldn't be changed now
1037 # NOTE(marcink): mandatory shouldn't be changed now
1034 # reviewer.mandatory = reviewers[uid]['reasons']
1038 # reviewer.mandatory = reviewers[uid]['reasons']
1035 Session().add(reviewer)
1039 Session().add(reviewer)
1036 added_audit_reviewers.append(reviewer.get_dict())
1040 added_audit_reviewers.append(reviewer.get_dict())
1037
1041
1038 for uid in ids_to_remove:
1042 for uid in ids_to_remove:
1039 changed = True
1043 changed = True
1040 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1044 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1041 # that prevents and fixes cases that we added the same reviewer twice.
1045 # that prevents and fixes cases that we added the same reviewer twice.
1042 # this CAN happen due to the lack of DB checks
1046 # this CAN happen due to the lack of DB checks
1043 reviewers = PullRequestReviewers.query()\
1047 reviewers = PullRequestReviewers.query()\
1044 .filter(PullRequestReviewers.user_id == uid,
1048 .filter(PullRequestReviewers.user_id == uid,
1045 PullRequestReviewers.pull_request == pull_request)\
1049 PullRequestReviewers.pull_request == pull_request)\
1046 .all()
1050 .all()
1047
1051
1048 for obj in reviewers:
1052 for obj in reviewers:
1049 added_audit_reviewers.append(obj.get_dict())
1053 added_audit_reviewers.append(obj.get_dict())
1050 Session().delete(obj)
1054 Session().delete(obj)
1051
1055
1052 if changed:
1056 if changed:
1053 Session().expire_all()
1057 Session().expire_all()
1054 pull_request.updated_on = datetime.datetime.now()
1058 pull_request.updated_on = datetime.datetime.now()
1055 Session().add(pull_request)
1059 Session().add(pull_request)
1056
1060
1057 # finally store audit logs
1061 # finally store audit logs
1058 for user_data in added_audit_reviewers:
1062 for user_data in added_audit_reviewers:
1059 self._log_audit_action(
1063 self._log_audit_action(
1060 'repo.pull_request.reviewer.add', {'data': user_data},
1064 'repo.pull_request.reviewer.add', {'data': user_data},
1061 user, pull_request)
1065 user, pull_request)
1062 for user_data in removed_audit_reviewers:
1066 for user_data in removed_audit_reviewers:
1063 self._log_audit_action(
1067 self._log_audit_action(
1064 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1068 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1065 user, pull_request)
1069 user, pull_request)
1066
1070
1067 self.notify_reviewers(pull_request, ids_to_add)
1071 self.notify_reviewers(pull_request, ids_to_add)
1068 return ids_to_add, ids_to_remove
1072 return ids_to_add, ids_to_remove
1069
1073
1070 def get_url(self, pull_request, request=None, permalink=False):
1074 def get_url(self, pull_request, request=None, permalink=False):
1071 if not request:
1075 if not request:
1072 request = get_current_request()
1076 request = get_current_request()
1073
1077
1074 if permalink:
1078 if permalink:
1075 return request.route_url(
1079 return request.route_url(
1076 'pull_requests_global',
1080 'pull_requests_global',
1077 pull_request_id=pull_request.pull_request_id,)
1081 pull_request_id=pull_request.pull_request_id,)
1078 else:
1082 else:
1079 return request.route_url('pullrequest_show',
1083 return request.route_url('pullrequest_show',
1080 repo_name=safe_str(pull_request.target_repo.repo_name),
1084 repo_name=safe_str(pull_request.target_repo.repo_name),
1081 pull_request_id=pull_request.pull_request_id,)
1085 pull_request_id=pull_request.pull_request_id,)
1082
1086
1083 def get_shadow_clone_url(self, pull_request, request=None):
1087 def get_shadow_clone_url(self, pull_request, request=None):
1084 """
1088 """
1085 Returns qualified url pointing to the shadow repository. If this pull
1089 Returns qualified url pointing to the shadow repository. If this pull
1086 request is closed there is no shadow repository and ``None`` will be
1090 request is closed there is no shadow repository and ``None`` will be
1087 returned.
1091 returned.
1088 """
1092 """
1089 if pull_request.is_closed():
1093 if pull_request.is_closed():
1090 return None
1094 return None
1091 else:
1095 else:
1092 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1096 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1093 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1097 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1094
1098
1095 def notify_reviewers(self, pull_request, reviewers_ids):
1099 def notify_reviewers(self, pull_request, reviewers_ids):
1096 # notification to reviewers
1100 # notification to reviewers
1097 if not reviewers_ids:
1101 if not reviewers_ids:
1098 return
1102 return
1099
1103
1100 pull_request_obj = pull_request
1104 pull_request_obj = pull_request
1101 # get the current participants of this pull request
1105 # get the current participants of this pull request
1102 recipients = reviewers_ids
1106 recipients = reviewers_ids
1103 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1107 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1104
1108
1105 pr_source_repo = pull_request_obj.source_repo
1109 pr_source_repo = pull_request_obj.source_repo
1106 pr_target_repo = pull_request_obj.target_repo
1110 pr_target_repo = pull_request_obj.target_repo
1107
1111
1108 pr_url = h.route_url('pullrequest_show',
1112 pr_url = h.route_url('pullrequest_show',
1109 repo_name=pr_target_repo.repo_name,
1113 repo_name=pr_target_repo.repo_name,
1110 pull_request_id=pull_request_obj.pull_request_id,)
1114 pull_request_id=pull_request_obj.pull_request_id,)
1111
1115
1112 # set some variables for email notification
1116 # set some variables for email notification
1113 pr_target_repo_url = h.route_url(
1117 pr_target_repo_url = h.route_url(
1114 'repo_summary', repo_name=pr_target_repo.repo_name)
1118 'repo_summary', repo_name=pr_target_repo.repo_name)
1115
1119
1116 pr_source_repo_url = h.route_url(
1120 pr_source_repo_url = h.route_url(
1117 'repo_summary', repo_name=pr_source_repo.repo_name)
1121 'repo_summary', repo_name=pr_source_repo.repo_name)
1118
1122
1119 # pull request specifics
1123 # pull request specifics
1120 pull_request_commits = [
1124 pull_request_commits = [
1121 (x.raw_id, x.message)
1125 (x.raw_id, x.message)
1122 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1126 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1123
1127
1124 kwargs = {
1128 kwargs = {
1125 'user': pull_request.author,
1129 'user': pull_request.author,
1126 'pull_request': pull_request_obj,
1130 'pull_request': pull_request_obj,
1127 'pull_request_commits': pull_request_commits,
1131 'pull_request_commits': pull_request_commits,
1128
1132
1129 'pull_request_target_repo': pr_target_repo,
1133 'pull_request_target_repo': pr_target_repo,
1130 'pull_request_target_repo_url': pr_target_repo_url,
1134 'pull_request_target_repo_url': pr_target_repo_url,
1131
1135
1132 'pull_request_source_repo': pr_source_repo,
1136 'pull_request_source_repo': pr_source_repo,
1133 'pull_request_source_repo_url': pr_source_repo_url,
1137 'pull_request_source_repo_url': pr_source_repo_url,
1134
1138
1135 'pull_request_url': pr_url,
1139 'pull_request_url': pr_url,
1136 }
1140 }
1137
1141
1138 # pre-generate the subject for notification itself
1142 # pre-generate the subject for notification itself
1139 (subject,
1143 (subject,
1140 _h, _e, # we don't care about those
1144 _h, _e, # we don't care about those
1141 body_plaintext) = EmailNotificationModel().render_email(
1145 body_plaintext) = EmailNotificationModel().render_email(
1142 notification_type, **kwargs)
1146 notification_type, **kwargs)
1143
1147
1144 # create notification objects, and emails
1148 # create notification objects, and emails
1145 NotificationModel().create(
1149 NotificationModel().create(
1146 created_by=pull_request.author,
1150 created_by=pull_request.author,
1147 notification_subject=subject,
1151 notification_subject=subject,
1148 notification_body=body_plaintext,
1152 notification_body=body_plaintext,
1149 notification_type=notification_type,
1153 notification_type=notification_type,
1150 recipients=recipients,
1154 recipients=recipients,
1151 email_kwargs=kwargs,
1155 email_kwargs=kwargs,
1152 )
1156 )
1153
1157
1154 def delete(self, pull_request, user):
1158 def delete(self, pull_request, user):
1155 pull_request = self.__get_pull_request(pull_request)
1159 pull_request = self.__get_pull_request(pull_request)
1156 old_data = pull_request.get_api_data(with_merge_state=False)
1160 old_data = pull_request.get_api_data(with_merge_state=False)
1157 self._cleanup_merge_workspace(pull_request)
1161 self._cleanup_merge_workspace(pull_request)
1158 self._log_audit_action(
1162 self._log_audit_action(
1159 'repo.pull_request.delete', {'old_data': old_data},
1163 'repo.pull_request.delete', {'old_data': old_data},
1160 user, pull_request)
1164 user, pull_request)
1161 Session().delete(pull_request)
1165 Session().delete(pull_request)
1162
1166
1163 def close_pull_request(self, pull_request, user):
1167 def close_pull_request(self, pull_request, user):
1164 pull_request = self.__get_pull_request(pull_request)
1168 pull_request = self.__get_pull_request(pull_request)
1165 self._cleanup_merge_workspace(pull_request)
1169 self._cleanup_merge_workspace(pull_request)
1166 pull_request.status = PullRequest.STATUS_CLOSED
1170 pull_request.status = PullRequest.STATUS_CLOSED
1167 pull_request.updated_on = datetime.datetime.now()
1171 pull_request.updated_on = datetime.datetime.now()
1168 Session().add(pull_request)
1172 Session().add(pull_request)
1169 self.trigger_pull_request_hook(
1173 self.trigger_pull_request_hook(
1170 pull_request, pull_request.author, 'close')
1174 pull_request, pull_request.author, 'close')
1171
1175
1172 pr_data = pull_request.get_api_data(with_merge_state=False)
1176 pr_data = pull_request.get_api_data(with_merge_state=False)
1173 self._log_audit_action(
1177 self._log_audit_action(
1174 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1178 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1175
1179
1176 def close_pull_request_with_comment(
1180 def close_pull_request_with_comment(
1177 self, pull_request, user, repo, message=None, auth_user=None):
1181 self, pull_request, user, repo, message=None, auth_user=None):
1178
1182
1179 pull_request_review_status = pull_request.calculated_review_status()
1183 pull_request_review_status = pull_request.calculated_review_status()
1180
1184
1181 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1185 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1182 # approved only if we have voting consent
1186 # approved only if we have voting consent
1183 status = ChangesetStatus.STATUS_APPROVED
1187 status = ChangesetStatus.STATUS_APPROVED
1184 else:
1188 else:
1185 status = ChangesetStatus.STATUS_REJECTED
1189 status = ChangesetStatus.STATUS_REJECTED
1186 status_lbl = ChangesetStatus.get_status_lbl(status)
1190 status_lbl = ChangesetStatus.get_status_lbl(status)
1187
1191
1188 default_message = (
1192 default_message = (
1189 'Closing with status change {transition_icon} {status}.'
1193 'Closing with status change {transition_icon} {status}.'
1190 ).format(transition_icon='>', status=status_lbl)
1194 ).format(transition_icon='>', status=status_lbl)
1191 text = message or default_message
1195 text = message or default_message
1192
1196
1193 # create a comment, and link it to new status
1197 # create a comment, and link it to new status
1194 comment = CommentsModel().create(
1198 comment = CommentsModel().create(
1195 text=text,
1199 text=text,
1196 repo=repo.repo_id,
1200 repo=repo.repo_id,
1197 user=user.user_id,
1201 user=user.user_id,
1198 pull_request=pull_request.pull_request_id,
1202 pull_request=pull_request.pull_request_id,
1199 status_change=status_lbl,
1203 status_change=status_lbl,
1200 status_change_type=status,
1204 status_change_type=status,
1201 closing_pr=True,
1205 closing_pr=True,
1202 auth_user=auth_user,
1206 auth_user=auth_user,
1203 )
1207 )
1204
1208
1205 # calculate old status before we change it
1209 # calculate old status before we change it
1206 old_calculated_status = pull_request.calculated_review_status()
1210 old_calculated_status = pull_request.calculated_review_status()
1207 ChangesetStatusModel().set_status(
1211 ChangesetStatusModel().set_status(
1208 repo.repo_id,
1212 repo.repo_id,
1209 status,
1213 status,
1210 user.user_id,
1214 user.user_id,
1211 comment=comment,
1215 comment=comment,
1212 pull_request=pull_request.pull_request_id
1216 pull_request=pull_request.pull_request_id
1213 )
1217 )
1214
1218
1215 Session().flush()
1219 Session().flush()
1216 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1220 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1217 # we now calculate the status of pull request again, and based on that
1221 # we now calculate the status of pull request again, and based on that
1218 # calculation trigger status change. This might happen in cases
1222 # calculation trigger status change. This might happen in cases
1219 # that non-reviewer admin closes a pr, which means his vote doesn't
1223 # that non-reviewer admin closes a pr, which means his vote doesn't
1220 # change the status, while if he's a reviewer this might change it.
1224 # change the status, while if he's a reviewer this might change it.
1221 calculated_status = pull_request.calculated_review_status()
1225 calculated_status = pull_request.calculated_review_status()
1222 if old_calculated_status != calculated_status:
1226 if old_calculated_status != calculated_status:
1223 self.trigger_pull_request_hook(
1227 self.trigger_pull_request_hook(
1224 pull_request, user, 'review_status_change',
1228 pull_request, user, 'review_status_change',
1225 data={'status': calculated_status})
1229 data={'status': calculated_status})
1226
1230
1227 # finally close the PR
1231 # finally close the PR
1228 PullRequestModel().close_pull_request(
1232 PullRequestModel().close_pull_request(
1229 pull_request.pull_request_id, user)
1233 pull_request.pull_request_id, user)
1230
1234
1231 return comment, status
1235 return comment, status
1232
1236
1233 def merge_status(self, pull_request, translator=None,
1237 def merge_status(self, pull_request, translator=None,
1234 force_shadow_repo_refresh=False):
1238 force_shadow_repo_refresh=False):
1235 _ = translator or get_current_request().translate
1239 _ = translator or get_current_request().translate
1236
1240
1237 if not self._is_merge_enabled(pull_request):
1241 if not self._is_merge_enabled(pull_request):
1238 return False, _('Server-side pull request merging is disabled.')
1242 return False, _('Server-side pull request merging is disabled.')
1239 if pull_request.is_closed():
1243 if pull_request.is_closed():
1240 return False, _('This pull request is closed.')
1244 return False, _('This pull request is closed.')
1241 merge_possible, msg = self._check_repo_requirements(
1245 merge_possible, msg = self._check_repo_requirements(
1242 target=pull_request.target_repo, source=pull_request.source_repo,
1246 target=pull_request.target_repo, source=pull_request.source_repo,
1243 translator=_)
1247 translator=_)
1244 if not merge_possible:
1248 if not merge_possible:
1245 return merge_possible, msg
1249 return merge_possible, msg
1246
1250
1247 try:
1251 try:
1248 resp = self._try_merge(
1252 resp = self._try_merge(
1249 pull_request,
1253 pull_request,
1250 force_shadow_repo_refresh=force_shadow_repo_refresh)
1254 force_shadow_repo_refresh=force_shadow_repo_refresh)
1251 log.debug("Merge response: %s", resp)
1255 log.debug("Merge response: %s", resp)
1252 status = resp.possible, resp.merge_status_message
1256 status = resp.possible, resp.merge_status_message
1253 except NotImplementedError:
1257 except NotImplementedError:
1254 status = False, _('Pull request merging is not supported.')
1258 status = False, _('Pull request merging is not supported.')
1255
1259
1256 return status
1260 return status
1257
1261
1258 def _check_repo_requirements(self, target, source, translator):
1262 def _check_repo_requirements(self, target, source, translator):
1259 """
1263 """
1260 Check if `target` and `source` have compatible requirements.
1264 Check if `target` and `source` have compatible requirements.
1261
1265
1262 Currently this is just checking for largefiles.
1266 Currently this is just checking for largefiles.
1263 """
1267 """
1264 _ = translator
1268 _ = translator
1265 target_has_largefiles = self._has_largefiles(target)
1269 target_has_largefiles = self._has_largefiles(target)
1266 source_has_largefiles = self._has_largefiles(source)
1270 source_has_largefiles = self._has_largefiles(source)
1267 merge_possible = True
1271 merge_possible = True
1268 message = u''
1272 message = u''
1269
1273
1270 if target_has_largefiles != source_has_largefiles:
1274 if target_has_largefiles != source_has_largefiles:
1271 merge_possible = False
1275 merge_possible = False
1272 if source_has_largefiles:
1276 if source_has_largefiles:
1273 message = _(
1277 message = _(
1274 'Target repository large files support is disabled.')
1278 'Target repository large files support is disabled.')
1275 else:
1279 else:
1276 message = _(
1280 message = _(
1277 'Source repository large files support is disabled.')
1281 'Source repository large files support is disabled.')
1278
1282
1279 return merge_possible, message
1283 return merge_possible, message
1280
1284
1281 def _has_largefiles(self, repo):
1285 def _has_largefiles(self, repo):
1282 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1286 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1283 'extensions', 'largefiles')
1287 'extensions', 'largefiles')
1284 return largefiles_ui and largefiles_ui[0].active
1288 return largefiles_ui and largefiles_ui[0].active
1285
1289
1286 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1290 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1287 """
1291 """
1288 Try to merge the pull request and return the merge status.
1292 Try to merge the pull request and return the merge status.
1289 """
1293 """
1290 log.debug(
1294 log.debug(
1291 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1295 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1292 pull_request.pull_request_id, force_shadow_repo_refresh)
1296 pull_request.pull_request_id, force_shadow_repo_refresh)
1293 target_vcs = pull_request.target_repo.scm_instance()
1297 target_vcs = pull_request.target_repo.scm_instance()
1294 # Refresh the target reference.
1298 # Refresh the target reference.
1295 try:
1299 try:
1296 target_ref = self._refresh_reference(
1300 target_ref = self._refresh_reference(
1297 pull_request.target_ref_parts, target_vcs)
1301 pull_request.target_ref_parts, target_vcs)
1298 except CommitDoesNotExistError:
1302 except CommitDoesNotExistError:
1299 merge_state = MergeResponse(
1303 merge_state = MergeResponse(
1300 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1304 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1301 metadata={'target_ref': pull_request.target_ref_parts})
1305 metadata={'target_ref': pull_request.target_ref_parts})
1302 return merge_state
1306 return merge_state
1303
1307
1304 target_locked = pull_request.target_repo.locked
1308 target_locked = pull_request.target_repo.locked
1305 if target_locked and target_locked[0]:
1309 if target_locked and target_locked[0]:
1306 locked_by = 'user:{}'.format(target_locked[0])
1310 locked_by = 'user:{}'.format(target_locked[0])
1307 log.debug("The target repository is locked by %s.", locked_by)
1311 log.debug("The target repository is locked by %s.", locked_by)
1308 merge_state = MergeResponse(
1312 merge_state = MergeResponse(
1309 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1313 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1310 metadata={'locked_by': locked_by})
1314 metadata={'locked_by': locked_by})
1311 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1315 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1312 pull_request, target_ref):
1316 pull_request, target_ref):
1313 log.debug("Refreshing the merge status of the repository.")
1317 log.debug("Refreshing the merge status of the repository.")
1314 merge_state = self._refresh_merge_state(
1318 merge_state = self._refresh_merge_state(
1315 pull_request, target_vcs, target_ref)
1319 pull_request, target_vcs, target_ref)
1316 else:
1320 else:
1317 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1321 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1318 metadata = {
1322 metadata = {
1319 'target_ref': pull_request.target_ref_parts,
1323 'target_ref': pull_request.target_ref_parts,
1320 'source_ref': pull_request.source_ref_parts,
1324 'source_ref': pull_request.source_ref_parts,
1321 }
1325 }
1322 if not possible and target_ref.type == 'branch':
1326 if not possible and target_ref.type == 'branch':
1323 # NOTE(marcink): case for mercurial multiple heads on branch
1327 # NOTE(marcink): case for mercurial multiple heads on branch
1324 heads = target_vcs._heads(target_ref.name)
1328 heads = target_vcs._heads(target_ref.name)
1325 if len(heads) != 1:
1329 if len(heads) != 1:
1326 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1330 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1327 metadata.update({
1331 metadata.update({
1328 'heads': heads
1332 'heads': heads
1329 })
1333 })
1330 merge_state = MergeResponse(
1334 merge_state = MergeResponse(
1331 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1335 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1332
1336
1333 return merge_state
1337 return merge_state
1334
1338
1335 def _refresh_reference(self, reference, vcs_repository):
1339 def _refresh_reference(self, reference, vcs_repository):
1336 if reference.type in self.UPDATABLE_REF_TYPES:
1340 if reference.type in self.UPDATABLE_REF_TYPES:
1337 name_or_id = reference.name
1341 name_or_id = reference.name
1338 else:
1342 else:
1339 name_or_id = reference.commit_id
1343 name_or_id = reference.commit_id
1344
1345 vcs_repository.count() # cache rebuild
1340 refreshed_commit = vcs_repository.get_commit(name_or_id)
1346 refreshed_commit = vcs_repository.get_commit(name_or_id)
1341 refreshed_reference = Reference(
1347 refreshed_reference = Reference(
1342 reference.type, reference.name, refreshed_commit.raw_id)
1348 reference.type, reference.name, refreshed_commit.raw_id)
1343 return refreshed_reference
1349 return refreshed_reference
1344
1350
1345 def _needs_merge_state_refresh(self, pull_request, target_reference):
1351 def _needs_merge_state_refresh(self, pull_request, target_reference):
1346 return not(
1352 return not(
1347 pull_request.revisions and
1353 pull_request.revisions and
1348 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1354 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1349 target_reference.commit_id == pull_request._last_merge_target_rev)
1355 target_reference.commit_id == pull_request._last_merge_target_rev)
1350
1356
1351 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1357 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1352 workspace_id = self._workspace_id(pull_request)
1358 workspace_id = self._workspace_id(pull_request)
1353 source_vcs = pull_request.source_repo.scm_instance()
1359 source_vcs = pull_request.source_repo.scm_instance()
1354 repo_id = pull_request.target_repo.repo_id
1360 repo_id = pull_request.target_repo.repo_id
1355 use_rebase = self._use_rebase_for_merging(pull_request)
1361 use_rebase = self._use_rebase_for_merging(pull_request)
1356 close_branch = self._close_branch_before_merging(pull_request)
1362 close_branch = self._close_branch_before_merging(pull_request)
1357 merge_state = target_vcs.merge(
1363 merge_state = target_vcs.merge(
1358 repo_id, workspace_id,
1364 repo_id, workspace_id,
1359 target_reference, source_vcs, pull_request.source_ref_parts,
1365 target_reference, source_vcs, pull_request.source_ref_parts,
1360 dry_run=True, use_rebase=use_rebase,
1366 dry_run=True, use_rebase=use_rebase,
1361 close_branch=close_branch)
1367 close_branch=close_branch)
1362
1368
1363 # Do not store the response if there was an unknown error.
1369 # Do not store the response if there was an unknown error.
1364 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1370 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1365 pull_request._last_merge_source_rev = \
1371 pull_request._last_merge_source_rev = \
1366 pull_request.source_ref_parts.commit_id
1372 pull_request.source_ref_parts.commit_id
1367 pull_request._last_merge_target_rev = target_reference.commit_id
1373 pull_request._last_merge_target_rev = target_reference.commit_id
1368 pull_request.last_merge_status = merge_state.failure_reason
1374 pull_request.last_merge_status = merge_state.failure_reason
1369 pull_request.shadow_merge_ref = merge_state.merge_ref
1375 pull_request.shadow_merge_ref = merge_state.merge_ref
1370 Session().add(pull_request)
1376 Session().add(pull_request)
1371 Session().commit()
1377 Session().commit()
1372
1378
1373 return merge_state
1379 return merge_state
1374
1380
1375 def _workspace_id(self, pull_request):
1381 def _workspace_id(self, pull_request):
1376 workspace_id = 'pr-%s' % pull_request.pull_request_id
1382 workspace_id = 'pr-%s' % pull_request.pull_request_id
1377 return workspace_id
1383 return workspace_id
1378
1384
1379 def generate_repo_data(self, repo, commit_id=None, branch=None,
1385 def generate_repo_data(self, repo, commit_id=None, branch=None,
1380 bookmark=None, translator=None):
1386 bookmark=None, translator=None):
1381 from rhodecode.model.repo import RepoModel
1387 from rhodecode.model.repo import RepoModel
1382
1388
1383 all_refs, selected_ref = \
1389 all_refs, selected_ref = \
1384 self._get_repo_pullrequest_sources(
1390 self._get_repo_pullrequest_sources(
1385 repo.scm_instance(), commit_id=commit_id,
1391 repo.scm_instance(), commit_id=commit_id,
1386 branch=branch, bookmark=bookmark, translator=translator)
1392 branch=branch, bookmark=bookmark, translator=translator)
1387
1393
1388 refs_select2 = []
1394 refs_select2 = []
1389 for element in all_refs:
1395 for element in all_refs:
1390 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1396 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1391 refs_select2.append({'text': element[1], 'children': children})
1397 refs_select2.append({'text': element[1], 'children': children})
1392
1398
1393 return {
1399 return {
1394 'user': {
1400 'user': {
1395 'user_id': repo.user.user_id,
1401 'user_id': repo.user.user_id,
1396 'username': repo.user.username,
1402 'username': repo.user.username,
1397 'firstname': repo.user.first_name,
1403 'firstname': repo.user.first_name,
1398 'lastname': repo.user.last_name,
1404 'lastname': repo.user.last_name,
1399 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1405 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1400 },
1406 },
1401 'name': repo.repo_name,
1407 'name': repo.repo_name,
1402 'link': RepoModel().get_url(repo),
1408 'link': RepoModel().get_url(repo),
1403 'description': h.chop_at_smart(repo.description_safe, '\n'),
1409 'description': h.chop_at_smart(repo.description_safe, '\n'),
1404 'refs': {
1410 'refs': {
1405 'all_refs': all_refs,
1411 'all_refs': all_refs,
1406 'selected_ref': selected_ref,
1412 'selected_ref': selected_ref,
1407 'select2_refs': refs_select2
1413 'select2_refs': refs_select2
1408 }
1414 }
1409 }
1415 }
1410
1416
1411 def generate_pullrequest_title(self, source, source_ref, target):
1417 def generate_pullrequest_title(self, source, source_ref, target):
1412 return u'{source}#{at_ref} to {target}'.format(
1418 return u'{source}#{at_ref} to {target}'.format(
1413 source=source,
1419 source=source,
1414 at_ref=source_ref,
1420 at_ref=source_ref,
1415 target=target,
1421 target=target,
1416 )
1422 )
1417
1423
1418 def _cleanup_merge_workspace(self, pull_request):
1424 def _cleanup_merge_workspace(self, pull_request):
1419 # Merging related cleanup
1425 # Merging related cleanup
1420 repo_id = pull_request.target_repo.repo_id
1426 repo_id = pull_request.target_repo.repo_id
1421 target_scm = pull_request.target_repo.scm_instance()
1427 target_scm = pull_request.target_repo.scm_instance()
1422 workspace_id = self._workspace_id(pull_request)
1428 workspace_id = self._workspace_id(pull_request)
1423
1429
1424 try:
1430 try:
1425 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1431 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1426 except NotImplementedError:
1432 except NotImplementedError:
1427 pass
1433 pass
1428
1434
1429 def _get_repo_pullrequest_sources(
1435 def _get_repo_pullrequest_sources(
1430 self, repo, commit_id=None, branch=None, bookmark=None,
1436 self, repo, commit_id=None, branch=None, bookmark=None,
1431 translator=None):
1437 translator=None):
1432 """
1438 """
1433 Return a structure with repo's interesting commits, suitable for
1439 Return a structure with repo's interesting commits, suitable for
1434 the selectors in pullrequest controller
1440 the selectors in pullrequest controller
1435
1441
1436 :param commit_id: a commit that must be in the list somehow
1442 :param commit_id: a commit that must be in the list somehow
1437 and selected by default
1443 and selected by default
1438 :param branch: a branch that must be in the list and selected
1444 :param branch: a branch that must be in the list and selected
1439 by default - even if closed
1445 by default - even if closed
1440 :param bookmark: a bookmark that must be in the list and selected
1446 :param bookmark: a bookmark that must be in the list and selected
1441 """
1447 """
1442 _ = translator or get_current_request().translate
1448 _ = translator or get_current_request().translate
1443
1449
1444 commit_id = safe_str(commit_id) if commit_id else None
1450 commit_id = safe_str(commit_id) if commit_id else None
1445 branch = safe_unicode(branch) if branch else None
1451 branch = safe_unicode(branch) if branch else None
1446 bookmark = safe_unicode(bookmark) if bookmark else None
1452 bookmark = safe_unicode(bookmark) if bookmark else None
1447
1453
1448 selected = None
1454 selected = None
1449
1455
1450 # order matters: first source that has commit_id in it will be selected
1456 # order matters: first source that has commit_id in it will be selected
1451 sources = []
1457 sources = []
1452 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1458 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1453 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1459 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1454
1460
1455 if commit_id:
1461 if commit_id:
1456 ref_commit = (h.short_id(commit_id), commit_id)
1462 ref_commit = (h.short_id(commit_id), commit_id)
1457 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1463 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1458
1464
1459 sources.append(
1465 sources.append(
1460 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1466 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1461 )
1467 )
1462
1468
1463 groups = []
1469 groups = []
1464
1470
1465 for group_key, ref_list, group_name, match in sources:
1471 for group_key, ref_list, group_name, match in sources:
1466 group_refs = []
1472 group_refs = []
1467 for ref_name, ref_id in ref_list:
1473 for ref_name, ref_id in ref_list:
1468 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1474 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1469 group_refs.append((ref_key, ref_name))
1475 group_refs.append((ref_key, ref_name))
1470
1476
1471 if not selected:
1477 if not selected:
1472 if set([commit_id, match]) & set([ref_id, ref_name]):
1478 if set([commit_id, match]) & set([ref_id, ref_name]):
1473 selected = ref_key
1479 selected = ref_key
1474
1480
1475 if group_refs:
1481 if group_refs:
1476 groups.append((group_refs, group_name))
1482 groups.append((group_refs, group_name))
1477
1483
1478 if not selected:
1484 if not selected:
1479 ref = commit_id or branch or bookmark
1485 ref = commit_id or branch or bookmark
1480 if ref:
1486 if ref:
1481 raise CommitDoesNotExistError(
1487 raise CommitDoesNotExistError(
1482 u'No commit refs could be found matching: {}'.format(ref))
1488 u'No commit refs could be found matching: {}'.format(ref))
1483 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1489 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1484 selected = u'branch:{}:{}'.format(
1490 selected = u'branch:{}:{}'.format(
1485 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1491 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1486 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1492 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1487 )
1493 )
1488 elif repo.commit_ids:
1494 elif repo.commit_ids:
1489 # make the user select in this case
1495 # make the user select in this case
1490 selected = None
1496 selected = None
1491 else:
1497 else:
1492 raise EmptyRepositoryError()
1498 raise EmptyRepositoryError()
1493 return groups, selected
1499 return groups, selected
1494
1500
1495 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1501 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1496 hide_whitespace_changes, diff_context):
1502 hide_whitespace_changes, diff_context):
1497
1503
1498 return self._get_diff_from_pr_or_version(
1504 return self._get_diff_from_pr_or_version(
1499 source_repo, source_ref_id, target_ref_id,
1505 source_repo, source_ref_id, target_ref_id,
1500 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1506 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1501
1507
1502 def _get_diff_from_pr_or_version(
1508 def _get_diff_from_pr_or_version(
1503 self, source_repo, source_ref_id, target_ref_id,
1509 self, source_repo, source_ref_id, target_ref_id,
1504 hide_whitespace_changes, diff_context):
1510 hide_whitespace_changes, diff_context):
1505
1511
1506 target_commit = source_repo.get_commit(
1512 target_commit = source_repo.get_commit(
1507 commit_id=safe_str(target_ref_id))
1513 commit_id=safe_str(target_ref_id))
1508 source_commit = source_repo.get_commit(
1514 source_commit = source_repo.get_commit(
1509 commit_id=safe_str(source_ref_id))
1515 commit_id=safe_str(source_ref_id))
1510 if isinstance(source_repo, Repository):
1516 if isinstance(source_repo, Repository):
1511 vcs_repo = source_repo.scm_instance()
1517 vcs_repo = source_repo.scm_instance()
1512 else:
1518 else:
1513 vcs_repo = source_repo
1519 vcs_repo = source_repo
1514
1520
1515 # TODO: johbo: In the context of an update, we cannot reach
1521 # TODO: johbo: In the context of an update, we cannot reach
1516 # the old commit anymore with our normal mechanisms. It needs
1522 # the old commit anymore with our normal mechanisms. It needs
1517 # some sort of special support in the vcs layer to avoid this
1523 # some sort of special support in the vcs layer to avoid this
1518 # workaround.
1524 # workaround.
1519 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1525 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1520 vcs_repo.alias == 'git'):
1526 vcs_repo.alias == 'git'):
1521 source_commit.raw_id = safe_str(source_ref_id)
1527 source_commit.raw_id = safe_str(source_ref_id)
1522
1528
1523 log.debug('calculating diff between '
1529 log.debug('calculating diff between '
1524 'source_ref:%s and target_ref:%s for repo `%s`',
1530 'source_ref:%s and target_ref:%s for repo `%s`',
1525 target_ref_id, source_ref_id,
1531 target_ref_id, source_ref_id,
1526 safe_unicode(vcs_repo.path))
1532 safe_unicode(vcs_repo.path))
1527
1533
1528 vcs_diff = vcs_repo.get_diff(
1534 vcs_diff = vcs_repo.get_diff(
1529 commit1=target_commit, commit2=source_commit,
1535 commit1=target_commit, commit2=source_commit,
1530 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1536 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1531 return vcs_diff
1537 return vcs_diff
1532
1538
1533 def _is_merge_enabled(self, pull_request):
1539 def _is_merge_enabled(self, pull_request):
1534 return self._get_general_setting(
1540 return self._get_general_setting(
1535 pull_request, 'rhodecode_pr_merge_enabled')
1541 pull_request, 'rhodecode_pr_merge_enabled')
1536
1542
1537 def _use_rebase_for_merging(self, pull_request):
1543 def _use_rebase_for_merging(self, pull_request):
1538 repo_type = pull_request.target_repo.repo_type
1544 repo_type = pull_request.target_repo.repo_type
1539 if repo_type == 'hg':
1545 if repo_type == 'hg':
1540 return self._get_general_setting(
1546 return self._get_general_setting(
1541 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1547 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1542 elif repo_type == 'git':
1548 elif repo_type == 'git':
1543 return self._get_general_setting(
1549 return self._get_general_setting(
1544 pull_request, 'rhodecode_git_use_rebase_for_merging')
1550 pull_request, 'rhodecode_git_use_rebase_for_merging')
1545
1551
1546 return False
1552 return False
1547
1553
1548 def _close_branch_before_merging(self, pull_request):
1554 def _close_branch_before_merging(self, pull_request):
1549 repo_type = pull_request.target_repo.repo_type
1555 repo_type = pull_request.target_repo.repo_type
1550 if repo_type == 'hg':
1556 if repo_type == 'hg':
1551 return self._get_general_setting(
1557 return self._get_general_setting(
1552 pull_request, 'rhodecode_hg_close_branch_before_merging')
1558 pull_request, 'rhodecode_hg_close_branch_before_merging')
1553 elif repo_type == 'git':
1559 elif repo_type == 'git':
1554 return self._get_general_setting(
1560 return self._get_general_setting(
1555 pull_request, 'rhodecode_git_close_branch_before_merging')
1561 pull_request, 'rhodecode_git_close_branch_before_merging')
1556
1562
1557 return False
1563 return False
1558
1564
1559 def _get_general_setting(self, pull_request, settings_key, default=False):
1565 def _get_general_setting(self, pull_request, settings_key, default=False):
1560 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1566 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1561 settings = settings_model.get_general_settings()
1567 settings = settings_model.get_general_settings()
1562 return settings.get(settings_key, default)
1568 return settings.get(settings_key, default)
1563
1569
1564 def _log_audit_action(self, action, action_data, user, pull_request):
1570 def _log_audit_action(self, action, action_data, user, pull_request):
1565 audit_logger.store(
1571 audit_logger.store(
1566 action=action,
1572 action=action,
1567 action_data=action_data,
1573 action_data=action_data,
1568 user=user,
1574 user=user,
1569 repo=pull_request.target_repo)
1575 repo=pull_request.target_repo)
1570
1576
1571 def get_reviewer_functions(self):
1577 def get_reviewer_functions(self):
1572 """
1578 """
1573 Fetches functions for validation and fetching default reviewers.
1579 Fetches functions for validation and fetching default reviewers.
1574 If available we use the EE package, else we fallback to CE
1580 If available we use the EE package, else we fallback to CE
1575 package functions
1581 package functions
1576 """
1582 """
1577 try:
1583 try:
1578 from rc_reviewers.utils import get_default_reviewers_data
1584 from rc_reviewers.utils import get_default_reviewers_data
1579 from rc_reviewers.utils import validate_default_reviewers
1585 from rc_reviewers.utils import validate_default_reviewers
1580 except ImportError:
1586 except ImportError:
1581 from rhodecode.apps.repository.utils import get_default_reviewers_data
1587 from rhodecode.apps.repository.utils import get_default_reviewers_data
1582 from rhodecode.apps.repository.utils import validate_default_reviewers
1588 from rhodecode.apps.repository.utils import validate_default_reviewers
1583
1589
1584 return get_default_reviewers_data, validate_default_reviewers
1590 return get_default_reviewers_data, validate_default_reviewers
1585
1591
1586
1592
1587 class MergeCheck(object):
1593 class MergeCheck(object):
1588 """
1594 """
1589 Perform Merge Checks and returns a check object which stores information
1595 Perform Merge Checks and returns a check object which stores information
1590 about merge errors, and merge conditions
1596 about merge errors, and merge conditions
1591 """
1597 """
1592 TODO_CHECK = 'todo'
1598 TODO_CHECK = 'todo'
1593 PERM_CHECK = 'perm'
1599 PERM_CHECK = 'perm'
1594 REVIEW_CHECK = 'review'
1600 REVIEW_CHECK = 'review'
1595 MERGE_CHECK = 'merge'
1601 MERGE_CHECK = 'merge'
1596
1602
1597 def __init__(self):
1603 def __init__(self):
1598 self.review_status = None
1604 self.review_status = None
1599 self.merge_possible = None
1605 self.merge_possible = None
1600 self.merge_msg = ''
1606 self.merge_msg = ''
1601 self.failed = None
1607 self.failed = None
1602 self.errors = []
1608 self.errors = []
1603 self.error_details = OrderedDict()
1609 self.error_details = OrderedDict()
1604
1610
1605 def push_error(self, error_type, message, error_key, details):
1611 def push_error(self, error_type, message, error_key, details):
1606 self.failed = True
1612 self.failed = True
1607 self.errors.append([error_type, message])
1613 self.errors.append([error_type, message])
1608 self.error_details[error_key] = dict(
1614 self.error_details[error_key] = dict(
1609 details=details,
1615 details=details,
1610 error_type=error_type,
1616 error_type=error_type,
1611 message=message
1617 message=message
1612 )
1618 )
1613
1619
1614 @classmethod
1620 @classmethod
1615 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1621 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1616 force_shadow_repo_refresh=False):
1622 force_shadow_repo_refresh=False):
1617 _ = translator
1623 _ = translator
1618 merge_check = cls()
1624 merge_check = cls()
1619
1625
1620 # permissions to merge
1626 # permissions to merge
1621 user_allowed_to_merge = PullRequestModel().check_user_merge(
1627 user_allowed_to_merge = PullRequestModel().check_user_merge(
1622 pull_request, auth_user)
1628 pull_request, auth_user)
1623 if not user_allowed_to_merge:
1629 if not user_allowed_to_merge:
1624 log.debug("MergeCheck: cannot merge, approval is pending.")
1630 log.debug("MergeCheck: cannot merge, approval is pending.")
1625
1631
1626 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1632 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1627 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1633 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1628 if fail_early:
1634 if fail_early:
1629 return merge_check
1635 return merge_check
1630
1636
1631 # permission to merge into the target branch
1637 # permission to merge into the target branch
1632 target_commit_id = pull_request.target_ref_parts.commit_id
1638 target_commit_id = pull_request.target_ref_parts.commit_id
1633 if pull_request.target_ref_parts.type == 'branch':
1639 if pull_request.target_ref_parts.type == 'branch':
1634 branch_name = pull_request.target_ref_parts.name
1640 branch_name = pull_request.target_ref_parts.name
1635 else:
1641 else:
1636 # for mercurial we can always figure out the branch from the commit
1642 # for mercurial we can always figure out the branch from the commit
1637 # in case of bookmark
1643 # in case of bookmark
1638 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1644 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1639 branch_name = target_commit.branch
1645 branch_name = target_commit.branch
1640
1646
1641 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1647 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1642 pull_request.target_repo.repo_name, branch_name)
1648 pull_request.target_repo.repo_name, branch_name)
1643 if branch_perm and branch_perm == 'branch.none':
1649 if branch_perm and branch_perm == 'branch.none':
1644 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1650 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1645 branch_name, rule)
1651 branch_name, rule)
1646 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1652 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1647 if fail_early:
1653 if fail_early:
1648 return merge_check
1654 return merge_check
1649
1655
1650 # review status, must be always present
1656 # review status, must be always present
1651 review_status = pull_request.calculated_review_status()
1657 review_status = pull_request.calculated_review_status()
1652 merge_check.review_status = review_status
1658 merge_check.review_status = review_status
1653
1659
1654 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1660 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1655 if not status_approved:
1661 if not status_approved:
1656 log.debug("MergeCheck: cannot merge, approval is pending.")
1662 log.debug("MergeCheck: cannot merge, approval is pending.")
1657
1663
1658 msg = _('Pull request reviewer approval is pending.')
1664 msg = _('Pull request reviewer approval is pending.')
1659
1665
1660 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1666 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1661
1667
1662 if fail_early:
1668 if fail_early:
1663 return merge_check
1669 return merge_check
1664
1670
1665 # left over TODOs
1671 # left over TODOs
1666 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1672 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1667 if todos:
1673 if todos:
1668 log.debug("MergeCheck: cannot merge, {} "
1674 log.debug("MergeCheck: cannot merge, {} "
1669 "unresolved TODOs left.".format(len(todos)))
1675 "unresolved TODOs left.".format(len(todos)))
1670
1676
1671 if len(todos) == 1:
1677 if len(todos) == 1:
1672 msg = _('Cannot merge, {} TODO still not resolved.').format(
1678 msg = _('Cannot merge, {} TODO still not resolved.').format(
1673 len(todos))
1679 len(todos))
1674 else:
1680 else:
1675 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1681 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1676 len(todos))
1682 len(todos))
1677
1683
1678 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1684 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1679
1685
1680 if fail_early:
1686 if fail_early:
1681 return merge_check
1687 return merge_check
1682
1688
1683 # merge possible, here is the filesystem simulation + shadow repo
1689 # merge possible, here is the filesystem simulation + shadow repo
1684 merge_status, msg = PullRequestModel().merge_status(
1690 merge_status, msg = PullRequestModel().merge_status(
1685 pull_request, translator=translator,
1691 pull_request, translator=translator,
1686 force_shadow_repo_refresh=force_shadow_repo_refresh)
1692 force_shadow_repo_refresh=force_shadow_repo_refresh)
1687 merge_check.merge_possible = merge_status
1693 merge_check.merge_possible = merge_status
1688 merge_check.merge_msg = msg
1694 merge_check.merge_msg = msg
1689 if not merge_status:
1695 if not merge_status:
1690 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1696 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1691 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1697 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1692
1698
1693 if fail_early:
1699 if fail_early:
1694 return merge_check
1700 return merge_check
1695
1701
1696 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1702 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1697 return merge_check
1703 return merge_check
1698
1704
1699 @classmethod
1705 @classmethod
1700 def get_merge_conditions(cls, pull_request, translator):
1706 def get_merge_conditions(cls, pull_request, translator):
1701 _ = translator
1707 _ = translator
1702 merge_details = {}
1708 merge_details = {}
1703
1709
1704 model = PullRequestModel()
1710 model = PullRequestModel()
1705 use_rebase = model._use_rebase_for_merging(pull_request)
1711 use_rebase = model._use_rebase_for_merging(pull_request)
1706
1712
1707 if use_rebase:
1713 if use_rebase:
1708 merge_details['merge_strategy'] = dict(
1714 merge_details['merge_strategy'] = dict(
1709 details={},
1715 details={},
1710 message=_('Merge strategy: rebase')
1716 message=_('Merge strategy: rebase')
1711 )
1717 )
1712 else:
1718 else:
1713 merge_details['merge_strategy'] = dict(
1719 merge_details['merge_strategy'] = dict(
1714 details={},
1720 details={},
1715 message=_('Merge strategy: explicit merge commit')
1721 message=_('Merge strategy: explicit merge commit')
1716 )
1722 )
1717
1723
1718 close_branch = model._close_branch_before_merging(pull_request)
1724 close_branch = model._close_branch_before_merging(pull_request)
1719 if close_branch:
1725 if close_branch:
1720 repo_type = pull_request.target_repo.repo_type
1726 repo_type = pull_request.target_repo.repo_type
1721 close_msg = ''
1727 close_msg = ''
1722 if repo_type == 'hg':
1728 if repo_type == 'hg':
1723 close_msg = _('Source branch will be closed after merge.')
1729 close_msg = _('Source branch will be closed after merge.')
1724 elif repo_type == 'git':
1730 elif repo_type == 'git':
1725 close_msg = _('Source branch will be deleted after merge.')
1731 close_msg = _('Source branch will be deleted after merge.')
1726
1732
1727 merge_details['close_branch'] = dict(
1733 merge_details['close_branch'] = dict(
1728 details={},
1734 details={},
1729 message=close_msg
1735 message=close_msg
1730 )
1736 )
1731
1737
1732 return merge_details
1738 return merge_details
1733
1739
1734
1740
1735 ChangeTuple = collections.namedtuple(
1741 ChangeTuple = collections.namedtuple(
1736 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1742 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1737
1743
1738 FileChangeTuple = collections.namedtuple(
1744 FileChangeTuple = collections.namedtuple(
1739 'FileChangeTuple', ['added', 'modified', 'removed'])
1745 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,1886 +1,1888 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33 import functools
33 import functools
34
34
35 import mock
35 import mock
36 import pyramid.testing
36 import pyramid.testing
37 import pytest
37 import pytest
38 import colander
38 import colander
39 import requests
39 import requests
40 import pyramid.paster
40 import pyramid.paster
41
41
42 import rhodecode
42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.db import (
46 from rhodecode.model.db import (
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.model.integration import IntegrationModel
57 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations import integration_type_registry
58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.vcs import create_vcsserver_proxy
60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.lib.vcs.nodes import FileNode
63 from rhodecode.tests import (
63 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 TEST_USER_REGULAR_PASS)
66 TEST_USER_REGULAR_PASS)
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 from rhodecode.tests.fixture import Fixture
68 from rhodecode.tests.fixture import Fixture
69 from rhodecode.config import utils as config_utils
69 from rhodecode.config import utils as config_utils
70
70
71 def _split_comma(value):
71 def _split_comma(value):
72 return value.split(',')
72 return value.split(',')
73
73
74
74
75 def pytest_addoption(parser):
75 def pytest_addoption(parser):
76 parser.addoption(
76 parser.addoption(
77 '--keep-tmp-path', action='store_true',
77 '--keep-tmp-path', action='store_true',
78 help="Keep the test temporary directories")
78 help="Keep the test temporary directories")
79 parser.addoption(
79 parser.addoption(
80 '--backends', action='store', type=_split_comma,
80 '--backends', action='store', type=_split_comma,
81 default=['git', 'hg', 'svn'],
81 default=['git', 'hg', 'svn'],
82 help="Select which backends to test for backend specific tests.")
82 help="Select which backends to test for backend specific tests.")
83 parser.addoption(
83 parser.addoption(
84 '--dbs', action='store', type=_split_comma,
84 '--dbs', action='store', type=_split_comma,
85 default=['sqlite'],
85 default=['sqlite'],
86 help="Select which database to test for database specific tests. "
86 help="Select which database to test for database specific tests. "
87 "Possible options are sqlite,postgres,mysql")
87 "Possible options are sqlite,postgres,mysql")
88 parser.addoption(
88 parser.addoption(
89 '--appenlight', '--ae', action='store_true',
89 '--appenlight', '--ae', action='store_true',
90 help="Track statistics in appenlight.")
90 help="Track statistics in appenlight.")
91 parser.addoption(
91 parser.addoption(
92 '--appenlight-api-key', '--ae-key',
92 '--appenlight-api-key', '--ae-key',
93 help="API key for Appenlight.")
93 help="API key for Appenlight.")
94 parser.addoption(
94 parser.addoption(
95 '--appenlight-url', '--ae-url',
95 '--appenlight-url', '--ae-url',
96 default="https://ae.rhodecode.com",
96 default="https://ae.rhodecode.com",
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 parser.addoption(
98 parser.addoption(
99 '--sqlite-connection-string', action='store',
99 '--sqlite-connection-string', action='store',
100 default='', help="Connection string for the dbs tests with SQLite")
100 default='', help="Connection string for the dbs tests with SQLite")
101 parser.addoption(
101 parser.addoption(
102 '--postgres-connection-string', action='store',
102 '--postgres-connection-string', action='store',
103 default='', help="Connection string for the dbs tests with Postgres")
103 default='', help="Connection string for the dbs tests with Postgres")
104 parser.addoption(
104 parser.addoption(
105 '--mysql-connection-string', action='store',
105 '--mysql-connection-string', action='store',
106 default='', help="Connection string for the dbs tests with MySQL")
106 default='', help="Connection string for the dbs tests with MySQL")
107 parser.addoption(
107 parser.addoption(
108 '--repeat', type=int, default=100,
108 '--repeat', type=int, default=100,
109 help="Number of repetitions in performance tests.")
109 help="Number of repetitions in performance tests.")
110
110
111
111
112 def pytest_configure(config):
112 def pytest_configure(config):
113 from rhodecode.config import patches
113 from rhodecode.config import patches
114
114
115
115
116 def pytest_collection_modifyitems(session, config, items):
116 def pytest_collection_modifyitems(session, config, items):
117 # nottest marked, compare nose, used for transition from nose to pytest
117 # nottest marked, compare nose, used for transition from nose to pytest
118 remaining = [
118 remaining = [
119 i for i in items if getattr(i.obj, '__test__', True)]
119 i for i in items if getattr(i.obj, '__test__', True)]
120 items[:] = remaining
120 items[:] = remaining
121
121
122
122
123 def pytest_generate_tests(metafunc):
123 def pytest_generate_tests(metafunc):
124 # Support test generation based on --backend parameter
124 # Support test generation based on --backend parameter
125 if 'backend_alias' in metafunc.fixturenames:
125 if 'backend_alias' in metafunc.fixturenames:
126 backends = get_backends_from_metafunc(metafunc)
126 backends = get_backends_from_metafunc(metafunc)
127 scope = None
127 scope = None
128 if not backends:
128 if not backends:
129 pytest.skip("Not enabled for any of selected backends")
129 pytest.skip("Not enabled for any of selected backends")
130 metafunc.parametrize('backend_alias', backends, scope=scope)
130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 elif hasattr(metafunc.function, 'backends'):
131 elif hasattr(metafunc.function, 'backends'):
132 backends = get_backends_from_metafunc(metafunc)
132 backends = get_backends_from_metafunc(metafunc)
133 if not backends:
133 if not backends:
134 pytest.skip("Not enabled for any of selected backends")
134 pytest.skip("Not enabled for any of selected backends")
135
135
136
136
137 def get_backends_from_metafunc(metafunc):
137 def get_backends_from_metafunc(metafunc):
138 requested_backends = set(metafunc.config.getoption('--backends'))
138 requested_backends = set(metafunc.config.getoption('--backends'))
139 if hasattr(metafunc.function, 'backends'):
139 if hasattr(metafunc.function, 'backends'):
140 # Supported backends by this test function, created from
140 # Supported backends by this test function, created from
141 # pytest.mark.backends
141 # pytest.mark.backends
142 backends = metafunc.definition.get_closest_marker('backends').args
142 backends = metafunc.definition.get_closest_marker('backends').args
143 elif hasattr(metafunc.cls, 'backend_alias'):
143 elif hasattr(metafunc.cls, 'backend_alias'):
144 # Support class attribute "backend_alias", this is mainly
144 # Support class attribute "backend_alias", this is mainly
145 # for legacy reasons for tests not yet using pytest.mark.backends
145 # for legacy reasons for tests not yet using pytest.mark.backends
146 backends = [metafunc.cls.backend_alias]
146 backends = [metafunc.cls.backend_alias]
147 else:
147 else:
148 backends = metafunc.config.getoption('--backends')
148 backends = metafunc.config.getoption('--backends')
149 return requested_backends.intersection(backends)
149 return requested_backends.intersection(backends)
150
150
151
151
152 @pytest.fixture(scope='session', autouse=True)
152 @pytest.fixture(scope='session', autouse=True)
153 def activate_example_rcextensions(request):
153 def activate_example_rcextensions(request):
154 """
154 """
155 Patch in an example rcextensions module which verifies passed in kwargs.
155 Patch in an example rcextensions module which verifies passed in kwargs.
156 """
156 """
157 from rhodecode.config import rcextensions
157 from rhodecode.config import rcextensions
158
158
159 old_extensions = rhodecode.EXTENSIONS
159 old_extensions = rhodecode.EXTENSIONS
160 rhodecode.EXTENSIONS = rcextensions
160 rhodecode.EXTENSIONS = rcextensions
161 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
161 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
162
162
163 @request.addfinalizer
163 @request.addfinalizer
164 def cleanup():
164 def cleanup():
165 rhodecode.EXTENSIONS = old_extensions
165 rhodecode.EXTENSIONS = old_extensions
166
166
167
167
168 @pytest.fixture
168 @pytest.fixture
169 def capture_rcextensions():
169 def capture_rcextensions():
170 """
170 """
171 Returns the recorded calls to entry points in rcextensions.
171 Returns the recorded calls to entry points in rcextensions.
172 """
172 """
173 calls = rhodecode.EXTENSIONS.calls
173 calls = rhodecode.EXTENSIONS.calls
174 calls.clear()
174 calls.clear()
175 # Note: At this moment, it is still the empty dict, but that will
175 # Note: At this moment, it is still the empty dict, but that will
176 # be filled during the test run and since it is a reference this
176 # be filled during the test run and since it is a reference this
177 # is enough to make it work.
177 # is enough to make it work.
178 return calls
178 return calls
179
179
180
180
181 @pytest.fixture(scope='session')
181 @pytest.fixture(scope='session')
182 def http_environ_session():
182 def http_environ_session():
183 """
183 """
184 Allow to use "http_environ" in session scope.
184 Allow to use "http_environ" in session scope.
185 """
185 """
186 return plain_http_environ()
186 return plain_http_environ()
187
187
188
188
189 def plain_http_host_stub():
189 def plain_http_host_stub():
190 """
190 """
191 Value of HTTP_HOST in the test run.
191 Value of HTTP_HOST in the test run.
192 """
192 """
193 return 'example.com:80'
193 return 'example.com:80'
194
194
195
195
196 @pytest.fixture
196 @pytest.fixture
197 def http_host_stub():
197 def http_host_stub():
198 """
198 """
199 Value of HTTP_HOST in the test run.
199 Value of HTTP_HOST in the test run.
200 """
200 """
201 return plain_http_host_stub()
201 return plain_http_host_stub()
202
202
203
203
204 def plain_http_host_only_stub():
204 def plain_http_host_only_stub():
205 """
205 """
206 Value of HTTP_HOST in the test run.
206 Value of HTTP_HOST in the test run.
207 """
207 """
208 return plain_http_host_stub().split(':')[0]
208 return plain_http_host_stub().split(':')[0]
209
209
210
210
211 @pytest.fixture
211 @pytest.fixture
212 def http_host_only_stub():
212 def http_host_only_stub():
213 """
213 """
214 Value of HTTP_HOST in the test run.
214 Value of HTTP_HOST in the test run.
215 """
215 """
216 return plain_http_host_only_stub()
216 return plain_http_host_only_stub()
217
217
218
218
219 def plain_http_environ():
219 def plain_http_environ():
220 """
220 """
221 HTTP extra environ keys.
221 HTTP extra environ keys.
222
222
223 User by the test application and as well for setting up the pylons
223 User by the test application and as well for setting up the pylons
224 environment. In the case of the fixture "app" it should be possible
224 environment. In the case of the fixture "app" it should be possible
225 to override this for a specific test case.
225 to override this for a specific test case.
226 """
226 """
227 return {
227 return {
228 'SERVER_NAME': plain_http_host_only_stub(),
228 'SERVER_NAME': plain_http_host_only_stub(),
229 'SERVER_PORT': plain_http_host_stub().split(':')[1],
229 'SERVER_PORT': plain_http_host_stub().split(':')[1],
230 'HTTP_HOST': plain_http_host_stub(),
230 'HTTP_HOST': plain_http_host_stub(),
231 'HTTP_USER_AGENT': 'rc-test-agent',
231 'HTTP_USER_AGENT': 'rc-test-agent',
232 'REQUEST_METHOD': 'GET'
232 'REQUEST_METHOD': 'GET'
233 }
233 }
234
234
235
235
236 @pytest.fixture
236 @pytest.fixture
237 def http_environ():
237 def http_environ():
238 """
238 """
239 HTTP extra environ keys.
239 HTTP extra environ keys.
240
240
241 User by the test application and as well for setting up the pylons
241 User by the test application and as well for setting up the pylons
242 environment. In the case of the fixture "app" it should be possible
242 environment. In the case of the fixture "app" it should be possible
243 to override this for a specific test case.
243 to override this for a specific test case.
244 """
244 """
245 return plain_http_environ()
245 return plain_http_environ()
246
246
247
247
248 @pytest.fixture(scope='session')
248 @pytest.fixture(scope='session')
249 def baseapp(ini_config, vcsserver, http_environ_session):
249 def baseapp(ini_config, vcsserver, http_environ_session):
250 from rhodecode.lib.pyramid_utils import get_app_config
250 from rhodecode.lib.pyramid_utils import get_app_config
251 from rhodecode.config.middleware import make_pyramid_app
251 from rhodecode.config.middleware import make_pyramid_app
252
252
253 print("Using the RhodeCode configuration:{}".format(ini_config))
253 print("Using the RhodeCode configuration:{}".format(ini_config))
254 pyramid.paster.setup_logging(ini_config)
254 pyramid.paster.setup_logging(ini_config)
255
255
256 settings = get_app_config(ini_config)
256 settings = get_app_config(ini_config)
257 app = make_pyramid_app({'__file__': ini_config}, **settings)
257 app = make_pyramid_app({'__file__': ini_config}, **settings)
258
258
259 return app
259 return app
260
260
261
261
262 @pytest.fixture(scope='function')
262 @pytest.fixture(scope='function')
263 def app(request, config_stub, baseapp, http_environ):
263 def app(request, config_stub, baseapp, http_environ):
264 app = CustomTestApp(
264 app = CustomTestApp(
265 baseapp,
265 baseapp,
266 extra_environ=http_environ)
266 extra_environ=http_environ)
267 if request.cls:
267 if request.cls:
268 request.cls.app = app
268 request.cls.app = app
269 return app
269 return app
270
270
271
271
272 @pytest.fixture(scope='session')
272 @pytest.fixture(scope='session')
273 def app_settings(baseapp, ini_config):
273 def app_settings(baseapp, ini_config):
274 """
274 """
275 Settings dictionary used to create the app.
275 Settings dictionary used to create the app.
276
276
277 Parses the ini file and passes the result through the sanitize and apply
277 Parses the ini file and passes the result through the sanitize and apply
278 defaults mechanism in `rhodecode.config.middleware`.
278 defaults mechanism in `rhodecode.config.middleware`.
279 """
279 """
280 return baseapp.config.get_settings()
280 return baseapp.config.get_settings()
281
281
282
282
283 @pytest.fixture(scope='session')
283 @pytest.fixture(scope='session')
284 def db_connection(ini_settings):
284 def db_connection(ini_settings):
285 # Initialize the database connection.
285 # Initialize the database connection.
286 config_utils.initialize_database(ini_settings)
286 config_utils.initialize_database(ini_settings)
287
287
288
288
289 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
289 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
290
290
291
291
292 def _autologin_user(app, *args):
292 def _autologin_user(app, *args):
293 session = login_user_session(app, *args)
293 session = login_user_session(app, *args)
294 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
294 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
295 return LoginData(csrf_token, session['rhodecode_user'])
295 return LoginData(csrf_token, session['rhodecode_user'])
296
296
297
297
298 @pytest.fixture
298 @pytest.fixture
299 def autologin_user(app):
299 def autologin_user(app):
300 """
300 """
301 Utility fixture which makes sure that the admin user is logged in
301 Utility fixture which makes sure that the admin user is logged in
302 """
302 """
303 return _autologin_user(app)
303 return _autologin_user(app)
304
304
305
305
306 @pytest.fixture
306 @pytest.fixture
307 def autologin_regular_user(app):
307 def autologin_regular_user(app):
308 """
308 """
309 Utility fixture which makes sure that the regular user is logged in
309 Utility fixture which makes sure that the regular user is logged in
310 """
310 """
311 return _autologin_user(
311 return _autologin_user(
312 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
312 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
313
313
314
314
315 @pytest.fixture(scope='function')
315 @pytest.fixture(scope='function')
316 def csrf_token(request, autologin_user):
316 def csrf_token(request, autologin_user):
317 return autologin_user.csrf_token
317 return autologin_user.csrf_token
318
318
319
319
320 @pytest.fixture(scope='function')
320 @pytest.fixture(scope='function')
321 def xhr_header(request):
321 def xhr_header(request):
322 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
322 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
323
323
324
324
325 @pytest.fixture
325 @pytest.fixture
326 def real_crypto_backend(monkeypatch):
326 def real_crypto_backend(monkeypatch):
327 """
327 """
328 Switch the production crypto backend on for this test.
328 Switch the production crypto backend on for this test.
329
329
330 During the test run the crypto backend is replaced with a faster
330 During the test run the crypto backend is replaced with a faster
331 implementation based on the MD5 algorithm.
331 implementation based on the MD5 algorithm.
332 """
332 """
333 monkeypatch.setattr(rhodecode, 'is_test', False)
333 monkeypatch.setattr(rhodecode, 'is_test', False)
334
334
335
335
336 @pytest.fixture(scope='class')
336 @pytest.fixture(scope='class')
337 def index_location(request, baseapp):
337 def index_location(request, baseapp):
338 index_location = baseapp.config.get_settings()['search.location']
338 index_location = baseapp.config.get_settings()['search.location']
339 if request.cls:
339 if request.cls:
340 request.cls.index_location = index_location
340 request.cls.index_location = index_location
341 return index_location
341 return index_location
342
342
343
343
344 @pytest.fixture(scope='session', autouse=True)
344 @pytest.fixture(scope='session', autouse=True)
345 def tests_tmp_path(request):
345 def tests_tmp_path(request):
346 """
346 """
347 Create temporary directory to be used during the test session.
347 Create temporary directory to be used during the test session.
348 """
348 """
349 if not os.path.exists(TESTS_TMP_PATH):
349 if not os.path.exists(TESTS_TMP_PATH):
350 os.makedirs(TESTS_TMP_PATH)
350 os.makedirs(TESTS_TMP_PATH)
351
351
352 if not request.config.getoption('--keep-tmp-path'):
352 if not request.config.getoption('--keep-tmp-path'):
353 @request.addfinalizer
353 @request.addfinalizer
354 def remove_tmp_path():
354 def remove_tmp_path():
355 shutil.rmtree(TESTS_TMP_PATH)
355 shutil.rmtree(TESTS_TMP_PATH)
356
356
357 return TESTS_TMP_PATH
357 return TESTS_TMP_PATH
358
358
359
359
@pytest.fixture
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    # Timestamp-derived id keeps concurrently created groups unique.
    repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(repogroupid)

    def _cleanup():
        fixture.destroy_repo_group(repogroupid)

    request.addfinalizer(_cleanup)
    return repo_group
375
375
376
376
@pytest.fixture
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    # Timestamp-derived id keeps concurrently created groups unique.
    usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(usergroupid)

    def _cleanup():
        fixture.destroy_user_group(user_group)

    request.addfinalizer(_cleanup)
    return user_group
392
392
393
393
@pytest.fixture(scope='session')
def test_repo(request):
    """
    Session-wide container of read-only test repositories.

    Repositories are created lazily via the container and destroyed once
    at the end of the test session.
    """
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container
399
399
400
400
class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    # Maps a backend alias to the helper which extracts a repository dump
    # of that type into a working directory.
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        # Cache of (dump_name, backend_alias) -> repo_id for re-use.
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        """Return the repository for `dump_name`, creating it on first use."""
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        """Extract the dump, register it in the database and return the repo."""
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend_class = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)

        vcs_repo = backend_class(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        # Destroy in reverse creation order so dependents go first.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
449
449
450
450
def backend_base(request, backend_alias, baseapp, test_repo):
    """
    Shared implementation for the `backend*` fixtures.

    Skips the test when `backend_alias` is not enabled via `--backends`,
    applies xfail/skip backend markers, and returns a `Backend` helper
    bound to the standard `vcs_test_<alias>` repository.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = Backend(
        alias=backend_alias,
        repo_name=repo_name,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
466
466
467
467
@pytest.fixture
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)
481
481
482
482
@pytest.fixture
def backend_git(request, baseapp, test_repo):
    """Non-parametrized `backend` fixture pinned to git."""
    return backend_base(request, 'git', baseapp, test_repo)
486
486
487
487
@pytest.fixture
def backend_hg(request, baseapp, test_repo):
    """Non-parametrized `backend` fixture pinned to mercurial."""
    return backend_base(request, 'hg', baseapp, test_repo)
491
491
492
492
@pytest.fixture
def backend_svn(request, baseapp, test_repo):
    """Non-parametrized `backend` fixture pinned to subversion."""
    return backend_base(request, 'svn', baseapp, test_repo)
496
496
497
497
@pytest.fixture
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend".

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git
513
513
514
514
@pytest.fixture
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git
524
524
525
525
@pytest.fixture
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()
532
532
533
533
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        # NOTE(review): count() touches the freshly created (empty) repo
        # before commits are added — presumably priming/invalidating the
        # scm cache after the empty-repo check change; confirm against
        # scm_instance caching behavior.
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Derive a unique, filesystem-safe name from the test name plus a
        # per-test counter.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        # Destroy in reverse creation order so forks go before their origin.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])
704
706
705
707
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Shared implementation for the `vcsbackend*` fixtures.

    Skips the test when `backend_alias` is not enabled via `--backends`,
    applies xfail/skip backend markers, and returns a `VcsBackend` helper
    rooted at the standard `vcs_test_<alias>` path.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    repo_path = os.path.join(tests_tmp_path, repo_name)
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=repo_path,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
722
724
723
725
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
736
738
737
739
@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    """Non-parametrized `vcsbackend` fixture pinned to git."""
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
741
743
742
744
@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    """Non-parametrized `vcsbackend` fixture pinned to mercurial."""
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
746
748
747
749
@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    """Non-parametrized `vcsbackend` fixture pinned to subversion."""
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
751
753
752
754
@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
761
763
762
764
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        """Create a vcs-level repository and record it for later cleanup."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        """Create a new repository as a clone of `repo`."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Reserve and return a fresh repository path (no repo created)."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Derive a unique, filesystem-safe name from the test name plus a
        # per-test counter.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
840
842
841
843
def _add_commits_to_repo(vcs_repo, commits):
    """
    Apply `commits` (sequence of dicts) to `vcs_repo` via in-memory commits.

    Each entry may contain 'message', 'author', 'date', 'branch', 'parents'
    and file operations under 'added'/'changed'/'removed'.

    Returns a dict mapping commit message -> raw commit id (empty if no
    commits were given).
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = unicode(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        # Parents are referenced by the message of a previously created commit.
        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        # Guarantee a non-empty commit: add a synthetic file when no file
        # operation was requested.
        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=unicode(commit.get('author', 'Automatic')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids
878
880
879
881
@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    # Make sure any spawned server processes are terminated after the test.
    request.addfinalizer(server.cleanup)
    return server
889
891
890
892
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL under which the served repository is reachable; set by serve().
    url = None

    def __init__(self):
        # Processes spawned by serve(), terminated again in cleanup().
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Spawn a daemon serving *vcsrepo* and remember it for cleanup."""
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate every server process started by this instance."""
        for server_proc in self._cleanup_servers:
            server_proc.terminate()
916
918
917
919
@pytest.fixture
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    util = PRTestUtility(backend)
    request.addfinalizer(util.cleanup)
    return util
933
935
934
936
class PRTestUtility(object):
    """
    Helper around a single pull request for model and functional tests.

    State is created lazily by :meth:`create_pull_request` and torn down
    again by :meth:`cleanup`.
    """

    # Lazily populated per-instance state.
    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """
        Create (at most once per instance) and return the pull request.

        When *commits* is not given, a default three-commit history is used
        with the PR spanning 'c1'..'c2'.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not carry any versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Record an approval vote for every reviewer of the pull request."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the pull request as its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        """Build a 'branch:<name>:<commit_id>' reference for *commit_message*."""
        return 'branch:%s:%s' % (
            self.backend.default_branch_name,
            self.commit_ids[commit_message])

    def _get_reviewers(self):
        # (username, reasons, mandatory, rules)
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
        ]

    def update_source_repository(self, head=None):
        """Pull *head* (default 'c3') from the master repo into the source repo."""
        self.backend.pull_heads(self.source_repository, heads=[head or 'c3'])

    def add_one_commit(self, head=None):
        """
        Add one commit to the source repo, update the PR and return the id
        of the newly included commit.
        """
        self.update_source_repository(head=head)
        previous = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        added = set(self.pull_request.revisions) - previous
        assert len(added) == 1
        return added.pop()

    def remove_one_commit(self):
        """
        Strip the tip commit from the source repo, update the PR and return
        the id of the removed commit.
        """
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            strip_kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            strip_kwargs = {}
        source_vcs.strip(removed_commit_id, **strip_kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general PR comment, optionally linked to a PR version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Create an inline PR comment, optionally linked to a PR version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create a pull request and snapshot it into a version."""
        pull_request = self.create_pull_request()
        return PullRequestModel()._create_version_from_snapshot(pull_request)

    def create_status_votes(self, status, *reviewers):
        """Set *status* on the pull request for each given reviewer."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Force the 'pr merge enabled' VCS setting to *value* via a mock."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1117
1119
1118
1120
@pytest.fixture
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1126
1128
1127
1129
@pytest.fixture
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1135
1137
1136
1138
@pytest.fixture
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    util = UserUtility(test_name=request.node.name)
    request.addfinalizer(util.cleanup)
    return util
1145
1147
1146
1148
1147 # TODO: johbo: Split this up into utilities per domain or something similar
1149 # TODO: johbo: Split this up into utilities per domain or something similar
1148 class UserUtility(object):
1150 class UserUtility(object):
1149
1151
1150 def __init__(self, test_name="test"):
1152 def __init__(self, test_name="test"):
1151 self._test_name = self._sanitize_name(test_name)
1153 self._test_name = self._sanitize_name(test_name)
1152 self.fixture = Fixture()
1154 self.fixture = Fixture()
1153 self.repo_group_ids = []
1155 self.repo_group_ids = []
1154 self.repos_ids = []
1156 self.repos_ids = []
1155 self.user_ids = []
1157 self.user_ids = []
1156 self.user_group_ids = []
1158 self.user_group_ids = []
1157 self.user_repo_permission_ids = []
1159 self.user_repo_permission_ids = []
1158 self.user_group_repo_permission_ids = []
1160 self.user_group_repo_permission_ids = []
1159 self.user_repo_group_permission_ids = []
1161 self.user_repo_group_permission_ids = []
1160 self.user_group_repo_group_permission_ids = []
1162 self.user_group_repo_group_permission_ids = []
1161 self.user_user_group_permission_ids = []
1163 self.user_user_group_permission_ids = []
1162 self.user_group_user_group_permission_ids = []
1164 self.user_group_user_group_permission_ids = []
1163 self.user_permissions = []
1165 self.user_permissions = []
1164
1166
1165 def _sanitize_name(self, name):
1167 def _sanitize_name(self, name):
1166 for char in ['[', ']']:
1168 for char in ['[', ']']:
1167 name = name.replace(char, '_')
1169 name = name.replace(char, '_')
1168 return name
1170 return name
1169
1171
1170 def create_repo_group(
1172 def create_repo_group(
1171 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1173 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1172 group_name = "{prefix}_repogroup_{count}".format(
1174 group_name = "{prefix}_repogroup_{count}".format(
1173 prefix=self._test_name,
1175 prefix=self._test_name,
1174 count=len(self.repo_group_ids))
1176 count=len(self.repo_group_ids))
1175 repo_group = self.fixture.create_repo_group(
1177 repo_group = self.fixture.create_repo_group(
1176 group_name, cur_user=owner)
1178 group_name, cur_user=owner)
1177 if auto_cleanup:
1179 if auto_cleanup:
1178 self.repo_group_ids.append(repo_group.group_id)
1180 self.repo_group_ids.append(repo_group.group_id)
1179 return repo_group
1181 return repo_group
1180
1182
1181 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1183 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1182 auto_cleanup=True, repo_type='hg', bare=False):
1184 auto_cleanup=True, repo_type='hg', bare=False):
1183 repo_name = "{prefix}_repository_{count}".format(
1185 repo_name = "{prefix}_repository_{count}".format(
1184 prefix=self._test_name,
1186 prefix=self._test_name,
1185 count=len(self.repos_ids))
1187 count=len(self.repos_ids))
1186
1188
1187 repository = self.fixture.create_repo(
1189 repository = self.fixture.create_repo(
1188 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1190 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1189 if auto_cleanup:
1191 if auto_cleanup:
1190 self.repos_ids.append(repository.repo_id)
1192 self.repos_ids.append(repository.repo_id)
1191 return repository
1193 return repository
1192
1194
1193 def create_user(self, auto_cleanup=True, **kwargs):
1195 def create_user(self, auto_cleanup=True, **kwargs):
1194 user_name = "{prefix}_user_{count}".format(
1196 user_name = "{prefix}_user_{count}".format(
1195 prefix=self._test_name,
1197 prefix=self._test_name,
1196 count=len(self.user_ids))
1198 count=len(self.user_ids))
1197 user = self.fixture.create_user(user_name, **kwargs)
1199 user = self.fixture.create_user(user_name, **kwargs)
1198 if auto_cleanup:
1200 if auto_cleanup:
1199 self.user_ids.append(user.user_id)
1201 self.user_ids.append(user.user_id)
1200 return user
1202 return user
1201
1203
1202 def create_additional_user_email(self, user, email):
1204 def create_additional_user_email(self, user, email):
1203 uem = self.fixture.create_additional_user_email(user=user, email=email)
1205 uem = self.fixture.create_additional_user_email(user=user, email=email)
1204 return uem
1206 return uem
1205
1207
1206 def create_user_with_group(self):
1208 def create_user_with_group(self):
1207 user = self.create_user()
1209 user = self.create_user()
1208 user_group = self.create_user_group(members=[user])
1210 user_group = self.create_user_group(members=[user])
1209 return user, user_group
1211 return user, user_group
1210
1212
1211 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1213 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1212 auto_cleanup=True, **kwargs):
1214 auto_cleanup=True, **kwargs):
1213 group_name = "{prefix}_usergroup_{count}".format(
1215 group_name = "{prefix}_usergroup_{count}".format(
1214 prefix=self._test_name,
1216 prefix=self._test_name,
1215 count=len(self.user_group_ids))
1217 count=len(self.user_group_ids))
1216 user_group = self.fixture.create_user_group(
1218 user_group = self.fixture.create_user_group(
1217 group_name, cur_user=owner, **kwargs)
1219 group_name, cur_user=owner, **kwargs)
1218
1220
1219 if auto_cleanup:
1221 if auto_cleanup:
1220 self.user_group_ids.append(user_group.users_group_id)
1222 self.user_group_ids.append(user_group.users_group_id)
1221 if members:
1223 if members:
1222 for user in members:
1224 for user in members:
1223 UserGroupModel().add_user_to_group(user_group, user)
1225 UserGroupModel().add_user_to_group(user_group, user)
1224 return user_group
1226 return user_group
1225
1227
1226 def grant_user_permission(self, user_name, permission_name):
1228 def grant_user_permission(self, user_name, permission_name):
1227 self.inherit_default_user_permissions(user_name, False)
1229 self.inherit_default_user_permissions(user_name, False)
1228 self.user_permissions.append((user_name, permission_name))
1230 self.user_permissions.append((user_name, permission_name))
1229
1231
1230 def grant_user_permission_to_repo_group(
1232 def grant_user_permission_to_repo_group(
1231 self, repo_group, user, permission_name):
1233 self, repo_group, user, permission_name):
1232 permission = RepoGroupModel().grant_user_permission(
1234 permission = RepoGroupModel().grant_user_permission(
1233 repo_group, user, permission_name)
1235 repo_group, user, permission_name)
1234 self.user_repo_group_permission_ids.append(
1236 self.user_repo_group_permission_ids.append(
1235 (repo_group.group_id, user.user_id))
1237 (repo_group.group_id, user.user_id))
1236 return permission
1238 return permission
1237
1239
1238 def grant_user_group_permission_to_repo_group(
1240 def grant_user_group_permission_to_repo_group(
1239 self, repo_group, user_group, permission_name):
1241 self, repo_group, user_group, permission_name):
1240 permission = RepoGroupModel().grant_user_group_permission(
1242 permission = RepoGroupModel().grant_user_group_permission(
1241 repo_group, user_group, permission_name)
1243 repo_group, user_group, permission_name)
1242 self.user_group_repo_group_permission_ids.append(
1244 self.user_group_repo_group_permission_ids.append(
1243 (repo_group.group_id, user_group.users_group_id))
1245 (repo_group.group_id, user_group.users_group_id))
1244 return permission
1246 return permission
1245
1247
1246 def grant_user_permission_to_repo(
1248 def grant_user_permission_to_repo(
1247 self, repo, user, permission_name):
1249 self, repo, user, permission_name):
1248 permission = RepoModel().grant_user_permission(
1250 permission = RepoModel().grant_user_permission(
1249 repo, user, permission_name)
1251 repo, user, permission_name)
1250 self.user_repo_permission_ids.append(
1252 self.user_repo_permission_ids.append(
1251 (repo.repo_id, user.user_id))
1253 (repo.repo_id, user.user_id))
1252 return permission
1254 return permission
1253
1255
1254 def grant_user_group_permission_to_repo(
1256 def grant_user_group_permission_to_repo(
1255 self, repo, user_group, permission_name):
1257 self, repo, user_group, permission_name):
1256 permission = RepoModel().grant_user_group_permission(
1258 permission = RepoModel().grant_user_group_permission(
1257 repo, user_group, permission_name)
1259 repo, user_group, permission_name)
1258 self.user_group_repo_permission_ids.append(
1260 self.user_group_repo_permission_ids.append(
1259 (repo.repo_id, user_group.users_group_id))
1261 (repo.repo_id, user_group.users_group_id))
1260 return permission
1262 return permission
1261
1263
1262 def grant_user_permission_to_user_group(
1264 def grant_user_permission_to_user_group(
1263 self, target_user_group, user, permission_name):
1265 self, target_user_group, user, permission_name):
1264 permission = UserGroupModel().grant_user_permission(
1266 permission = UserGroupModel().grant_user_permission(
1265 target_user_group, user, permission_name)
1267 target_user_group, user, permission_name)
1266 self.user_user_group_permission_ids.append(
1268 self.user_user_group_permission_ids.append(
1267 (target_user_group.users_group_id, user.user_id))
1269 (target_user_group.users_group_id, user.user_id))
1268 return permission
1270 return permission
1269
1271
1270 def grant_user_group_permission_to_user_group(
1272 def grant_user_group_permission_to_user_group(
1271 self, target_user_group, user_group, permission_name):
1273 self, target_user_group, user_group, permission_name):
1272 permission = UserGroupModel().grant_user_group_permission(
1274 permission = UserGroupModel().grant_user_group_permission(
1273 target_user_group, user_group, permission_name)
1275 target_user_group, user_group, permission_name)
1274 self.user_group_user_group_permission_ids.append(
1276 self.user_group_user_group_permission_ids.append(
1275 (target_user_group.users_group_id, user_group.users_group_id))
1277 (target_user_group.users_group_id, user_group.users_group_id))
1276 return permission
1278 return permission
1277
1279
1278 def revoke_user_permission(self, user_name, permission_name):
1280 def revoke_user_permission(self, user_name, permission_name):
1279 self.inherit_default_user_permissions(user_name, True)
1281 self.inherit_default_user_permissions(user_name, True)
1280 UserModel().revoke_perm(user_name, permission_name)
1282 UserModel().revoke_perm(user_name, permission_name)
1281
1283
1282 def inherit_default_user_permissions(self, user_name, value):
1284 def inherit_default_user_permissions(self, user_name, value):
1283 user = UserModel().get_by_username(user_name)
1285 user = UserModel().get_by_username(user_name)
1284 user.inherit_default_permissions = value
1286 user.inherit_default_permissions = value
1285 Session().add(user)
1287 Session().add(user)
1286 Session().commit()
1288 Session().commit()
1287
1289
1288 def cleanup(self):
1290 def cleanup(self):
1289 self._cleanup_permissions()
1291 self._cleanup_permissions()
1290 self._cleanup_repos()
1292 self._cleanup_repos()
1291 self._cleanup_repo_groups()
1293 self._cleanup_repo_groups()
1292 self._cleanup_user_groups()
1294 self._cleanup_user_groups()
1293 self._cleanup_users()
1295 self._cleanup_users()
1294
1296
1295 def _cleanup_permissions(self):
1297 def _cleanup_permissions(self):
1296 if self.user_permissions:
1298 if self.user_permissions:
1297 for user_name, permission_name in self.user_permissions:
1299 for user_name, permission_name in self.user_permissions:
1298 self.revoke_user_permission(user_name, permission_name)
1300 self.revoke_user_permission(user_name, permission_name)
1299
1301
1300 for permission in self.user_repo_permission_ids:
1302 for permission in self.user_repo_permission_ids:
1301 RepoModel().revoke_user_permission(*permission)
1303 RepoModel().revoke_user_permission(*permission)
1302
1304
1303 for permission in self.user_group_repo_permission_ids:
1305 for permission in self.user_group_repo_permission_ids:
1304 RepoModel().revoke_user_group_permission(*permission)
1306 RepoModel().revoke_user_group_permission(*permission)
1305
1307
1306 for permission in self.user_repo_group_permission_ids:
1308 for permission in self.user_repo_group_permission_ids:
1307 RepoGroupModel().revoke_user_permission(*permission)
1309 RepoGroupModel().revoke_user_permission(*permission)
1308
1310
1309 for permission in self.user_group_repo_group_permission_ids:
1311 for permission in self.user_group_repo_group_permission_ids:
1310 RepoGroupModel().revoke_user_group_permission(*permission)
1312 RepoGroupModel().revoke_user_group_permission(*permission)
1311
1313
1312 for permission in self.user_user_group_permission_ids:
1314 for permission in self.user_user_group_permission_ids:
1313 UserGroupModel().revoke_user_permission(*permission)
1315 UserGroupModel().revoke_user_permission(*permission)
1314
1316
1315 for permission in self.user_group_user_group_permission_ids:
1317 for permission in self.user_group_user_group_permission_ids:
1316 UserGroupModel().revoke_user_group_permission(*permission)
1318 UserGroupModel().revoke_user_group_permission(*permission)
1317
1319
1318 def _cleanup_repo_groups(self):
1320 def _cleanup_repo_groups(self):
1319 def _repo_group_compare(first_group_id, second_group_id):
1321 def _repo_group_compare(first_group_id, second_group_id):
1320 """
1322 """
1321 Gives higher priority to the groups with the most complex paths
1323 Gives higher priority to the groups with the most complex paths
1322 """
1324 """
1323 first_group = RepoGroup.get(first_group_id)
1325 first_group = RepoGroup.get(first_group_id)
1324 second_group = RepoGroup.get(second_group_id)
1326 second_group = RepoGroup.get(second_group_id)
1325 first_group_parts = (
1327 first_group_parts = (
1326 len(first_group.group_name.split('/')) if first_group else 0)
1328 len(first_group.group_name.split('/')) if first_group else 0)
1327 second_group_parts = (
1329 second_group_parts = (
1328 len(second_group.group_name.split('/')) if second_group else 0)
1330 len(second_group.group_name.split('/')) if second_group else 0)
1329 return cmp(second_group_parts, first_group_parts)
1331 return cmp(second_group_parts, first_group_parts)
1330
1332
1331 sorted_repo_group_ids = sorted(
1333 sorted_repo_group_ids = sorted(
1332 self.repo_group_ids, cmp=_repo_group_compare)
1334 self.repo_group_ids, cmp=_repo_group_compare)
1333 for repo_group_id in sorted_repo_group_ids:
1335 for repo_group_id in sorted_repo_group_ids:
1334 self.fixture.destroy_repo_group(repo_group_id)
1336 self.fixture.destroy_repo_group(repo_group_id)
1335
1337
1336 def _cleanup_repos(self):
1338 def _cleanup_repos(self):
1337 sorted_repos_ids = sorted(self.repos_ids)
1339 sorted_repos_ids = sorted(self.repos_ids)
1338 for repo_id in sorted_repos_ids:
1340 for repo_id in sorted_repos_ids:
1339 self.fixture.destroy_repo(repo_id)
1341 self.fixture.destroy_repo(repo_id)
1340
1342
1341 def _cleanup_user_groups(self):
1343 def _cleanup_user_groups(self):
1342 def _user_group_compare(first_group_id, second_group_id):
1344 def _user_group_compare(first_group_id, second_group_id):
1343 """
1345 """
1344 Gives higher priority to the groups with the most complex paths
1346 Gives higher priority to the groups with the most complex paths
1345 """
1347 """
1346 first_group = UserGroup.get(first_group_id)
1348 first_group = UserGroup.get(first_group_id)
1347 second_group = UserGroup.get(second_group_id)
1349 second_group = UserGroup.get(second_group_id)
1348 first_group_parts = (
1350 first_group_parts = (
1349 len(first_group.users_group_name.split('/'))
1351 len(first_group.users_group_name.split('/'))
1350 if first_group else 0)
1352 if first_group else 0)
1351 second_group_parts = (
1353 second_group_parts = (
1352 len(second_group.users_group_name.split('/'))
1354 len(second_group.users_group_name.split('/'))
1353 if second_group else 0)
1355 if second_group else 0)
1354 return cmp(second_group_parts, first_group_parts)
1356 return cmp(second_group_parts, first_group_parts)
1355
1357
1356 sorted_user_group_ids = sorted(
1358 sorted_user_group_ids = sorted(
1357 self.user_group_ids, cmp=_user_group_compare)
1359 self.user_group_ids, cmp=_user_group_compare)
1358 for user_group_id in sorted_user_group_ids:
1360 for user_group_id in sorted_user_group_ids:
1359 self.fixture.destroy_user_group(user_group_id)
1361 self.fixture.destroy_user_group(user_group_id)
1360
1362
1361 def _cleanup_users(self):
1363 def _cleanup_users(self):
1362 for user_id in self.user_ids:
1364 for user_id in self.user_ids:
1363 self.fixture.destroy_user(user_id)
1365 self.fixture.destroy_user(user_id)
1364
1366
1365
1367
1366 # TODO: Think about moving this into a pytest-pyro package and make it a
1368 # TODO: Think about moving this into a pytest-pyro package and make it a
1367 # pytest plugin
1369 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Hook wrapper enriching failed test reports with the remote traceback.

    VCSServer attaches the remote traceback as the attribute
    `_vcs_server_traceback` on the raised exception instance; when the test
    phase raised, that traceback is appended to the report.
    """
    hook_outcome = yield
    test_report = hook_outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(test_report, call.excinfo.value)
1380
1382
1381
1383
1382 def _add_vcsserver_remote_traceback(report, exc):
1384 def _add_vcsserver_remote_traceback(report, exc):
1383 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1385 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1384
1386
1385 if vcsserver_traceback:
1387 if vcsserver_traceback:
1386 section = 'VCSServer remote traceback ' + report.when
1388 section = 'VCSServer remote traceback ' + report.when
1387 report.sections.append((section, vcsserver_traceback))
1389 report.sections.append((section, vcsserver_traceback))
1388
1390
1389
1391
@pytest.fixture(scope='session')
def testrun():
    """Session-wide metadata uniquely identifying this test run."""
    run_info = dict(
        uuid=uuid.uuid4(),
        start=datetime.datetime.utcnow().isoformat(),
        timestamp=int(time.time()),
    )
    return run_info
1397
1399
1398
1400
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The option
    ``--ae`` has to be used to enable this fixture and the API key for your
    application has to be provided in ``--ae-key``.
    """
    try:
        # cygwin cannot have yet psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    # Only request the baseapp fixture if appenlight tracking is enabled.
    # This will speed up a test run of unit tests by 2 to 3 seconds if
    # appenlight is not enabled.
    # NOTE: getfixturevalue replaces the deprecated getfuncargvalue alias
    # (same behavior, removed in pytest 4).
    baseapp = request.getfixturevalue("baseapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    server_and_port = baseapp.config.get_settings()['vcs.server']
    protocol = baseapp.config.get_settings()['vcs.server.protocol']
    server = create_vcsserver_proxy(server_and_port, protocol)
    with server:
        vcs_pid = server.get_pid()
        # run a GC first so the "before" numbers are not skewed by garbage
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

    test_process = psutil.Process()
    mem = test_process.memory_info()
    client.tag_before('test.rss', mem.rss)
    client.tag_before('test.vms', mem.vms)

    client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        # capture the "after" numbers and ship everything to Appenlight
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
1472
1474
1473
1475
class AppenlightClient():
    """
    Small client that queues tagged statistics payloads and ships them all
    to an Appenlight endpoint in one POST via :meth:`send_stats`.
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        # final URL carries the protocol version as a query parameter
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        """Record a measurement taken before the observed activity."""
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        """Record a measurement taken after the observed activity."""
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one payload dict, filling in any missing default metadata."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """
        Fold all before/after tags (plus deltas where computable) into a
        final payload and POST the accumulated stats; raises on non-200.
        """
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                tags.append((key + '.delta', self.tags_after[key] - value))
            except Exception:
                # missing "after" tag or non-numeric value: skip the delta
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1543
1545
1544
1546
@pytest.fixture
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    util = GistUtility()
    request.addfinalizer(util.cleanup)
    return util
1553
1555
1554
1556
class GistUtility(object):
    """Creates gists through the test fixture and destroys them on cleanup."""

    def __init__(self):
        self.fixture = Fixture()
        # ids of every gist created here, destroyed again in cleanup()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist and remember its id for later destruction."""
        new_gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(new_gist.gist_id)
        return new_gist

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1568
1570
1569
1571
@pytest.fixture
def enabled_backends(request):
    """Copy of the backend names enabled via the ``--backends`` option."""
    configured = request.config.option.backends
    return configured[:]
1574
1576
1575
1577
@pytest.fixture
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    util = SettingsUtility()
    request.addfinalizer(util.cleanup)
    return util
1584
1586
1585
1587
class SettingsUtility(object):
    """
    Creates RhodeCode ui/setting rows (globally or per repository) and
    deletes every tracked row again in :meth:`cleanup`.
    """

    def __init__(self):
        # ids of rows created with cleanup=True, grouped per model
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Insert a per-repository ui row; tracked for cleanup by default."""
        # derive a deterministic key when the caller did not provide one
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Insert a global ui row; tracked for cleanup by default."""
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Insert a per-repository setting row; tracked for cleanup by default."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Insert a global setting row; tracked for cleanup by default."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete every tracked row, then commit once at the end."""
        # same deletion order as rows were grouped above
        tracked = (
            (self.rhodecode_ui_ids, RhodeCodeUi),
            (self.rhodecode_setting_ids, RhodeCodeSetting),
            (self.repo_rhodecode_ui_ids, RepoRhodeCodeUi),
            (self.repo_rhodecode_setting_ids, RepoRhodeCodeSetting),
        )
        for tracked_ids, model in tracked:
            for id_ in tracked_ids:
                Session().delete(model.get(id_))

        Session().commit()
1666
1668
1667
1669
@pytest.fixture
def no_notifications(request):
    """Patch away notification creation for the duration of one test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1674
1676
1675
1677
@pytest.fixture(scope='session')
def repeat(request):
    """
    Number of repetitions configured for repetition-based tests.

    Slower calls may divide it by 10 or 100; the default keeps the standard
    test suite reasonably fast.
    """
    repetitions = request.config.getoption('--repeat')
    return repetitions
1685
1687
1686
1688
@pytest.fixture
def rhodecode_fixtures():
    """Fresh `Fixture` helper instance for a single test."""
    return Fixture()
1690
1692
1691
1693
@pytest.fixture
def context_stub():
    """
    Stub context object.
    """
    return pyramid.testing.DummyResource()
1699
1701
1700
1702
@pytest.fixture
def request_stub():
    """Stub request object using the https scheme."""
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1709
1711
1710
1712
@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator, tearing the
    testing setup down again when the test is over.
    """
    from rhodecode.lib.base import bootstrap_config
    config = bootstrap_config(request=request_stub)
    request.addfinalizer(pyramid.testing.tearDown)
    return config
1724
1726
1725
1727
@pytest.fixture
def StubIntegrationType():
    """Register and return a minimal integration type used by the tests."""

    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            # events delivered through send_event, recorded for assertions
            self.sent_events = []

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1762
1764
@pytest.fixture
def stub_integration_settings():
    """Settings payload matching the stub integration type's schema."""
    return dict(
        test_string_field='some data',
        test_int_field=100,
    )
1769
1771
1770
1772
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Repository-scoped stub integration, deleted again after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1784
1786
1785
1787
@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Repo-group stub integration (direct children only); auto-deleted."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1799
1801
1800
1802
@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType,
                                         stub_integration_settings):
    """Repo-group stub integration covering nested repos too; auto-deleted."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1814
1816
1815
1817
@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Globally-scoped stub integration, deleted again after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1829
1831
1830
1832
@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Root-repos-only stub integration, deleted again after the test."""
    # NOTE(review): shares the name 'test global integration' with
    # global_integration_stub — possibly unintended, kept as-is; verify
    # nothing keys off the integration name before renaming.
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1844
1846
1845
1847
@pytest.fixture
def local_dt_to_utc():
    """Factory converting a naive local datetime into a naive UTC datetime."""
    def _factory(dt):
        localized = dt.replace(tzinfo=dateutil.tz.tzlocal())
        return localized.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
    return _factory
1852
1854
1853
1855
@pytest.fixture
def disable_anonymous_user(request, baseapp):
    """Turn anonymous access off for one test and restore it afterwards."""
    set_anonymous_access(False)
    request.addfinalizer(lambda: set_anonymous_access(True))
1861
1863
1862
1864
@pytest.fixture(scope='module')
def rc_fixture(request):
    """Module-scoped `Fixture` helper instance."""
    return Fixture()
1866
1868
1867
1869
@pytest.fixture
def repo_groups(request):
    """
    Create a flat group, a parent group and a nested child group; all three
    are destroyed again after the test.
    """
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')
    groups_in_db = session.query(RepoGroup).all()
    assert len(groups_in_db) == 3
    assert child_group.group_parent_id == parent_group.group_id

    @request.addfinalizer
    def cleanup():
        # destroy the child before its parent so nesting unwinds cleanly
        fixture.destroy_repo_group(zombie_group)
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)

    return zombie_group, parent_group, child_group
General Comments 0
You need to be logged in to leave comments. Login now