##// END OF EJS Templates
fix(tests): fixed tests for PR celery hooks daemon
super-admin -
r5589:750c46dc default
parent child Browse files
Show More
@@ -1,89 +1,94 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import time
20 import time
21 import logging
21 import logging
22
22
23 from rhodecode.lib.config_utils import get_config
23 from rhodecode.lib.config_utils import get_app_config_lightweight
24
24
25 from rhodecode.lib.svn_txn_utils import get_txn_id_from_store
25 from rhodecode.lib.svn_txn_utils import get_txn_id_from_store
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29
29
class BaseHooksCallbackDaemon:
    """
    Minimal no-op context manager for hook callback daemons.

    Concrete daemons subclass this and add protocol specific setup and
    teardown; this base variant only logs when the context is entered
    and exited.
    """

    def __init__(self):
        pass

    def __enter__(self):
        # announce which concrete daemon implementation is in use
        log.debug('Running `%s` callback daemon', self.__class__.__name__)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # no suppression of exceptions: implicit None return propagates them
        log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
43
43
44
44
class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon):
    """
    Callback daemon that records the python module implementing the hooks,
    allowing hooks to be invoked in-process instead of over a network.
    """

    def __init__(self, module):
        super().__init__()
        # dotted path of the module providing the hook functions
        self.hooks_module = module

    def __repr__(self):
        return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})'
53
53
54
54
55 def prepare_callback_daemon(extras, protocol, host, txn_id=None):
55 def prepare_callback_daemon(extras, protocol, host, txn_id=None):
56
56
57 match protocol:
57 match protocol:
58 case 'http':
58 case 'http':
59 from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon
59 from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon
60 port = 0
60 port = 0
61 if txn_id:
61 if txn_id:
62 # read txn-id to re-use the PORT for callback daemon
62 # read txn-id to re-use the PORT for callback daemon
63 repo_path = os.path.join(extras['repo_store'], extras['repository'])
63 repo_path = os.path.join(extras['repo_store'], extras['repository'])
64 txn_details = get_txn_id_from_store(repo_path, txn_id)
64 txn_details = get_txn_id_from_store(repo_path, txn_id)
65 port = txn_details.get('port', 0)
65 port = txn_details.get('port', 0)
66
66
67 callback_daemon = HttpHooksCallbackDaemon(
67 callback_daemon = HttpHooksCallbackDaemon(
68 txn_id=txn_id, host=host, port=port)
68 txn_id=txn_id, host=host, port=port)
69 case 'celery':
69 case 'celery':
70 from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon
70 from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon
71 callback_daemon = CeleryHooksCallbackDaemon(get_config(extras['config']))
71
72 config = get_app_config_lightweight(extras['config'])
73 task_queue = config.get('celery.broker_url')
74 task_backend = config.get('celery.result_backend')
75
76 callback_daemon = CeleryHooksCallbackDaemon(task_queue, task_backend)
72 case 'local':
77 case 'local':
73 from rhodecode.lib.hook_daemon.hook_module import Hooks
78 from rhodecode.lib.hook_daemon.hook_module import Hooks
74 callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__)
79 callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__)
75 case _:
80 case _:
76 log.error('Unsupported callback daemon protocol "%s"', protocol)
81 log.error('Unsupported callback daemon protocol "%s"', protocol)
77 raise Exception('Unsupported callback daemon protocol.')
82 raise Exception('Unsupported callback daemon protocol.')
78
83
79 extras['hooks_uri'] = getattr(callback_daemon, 'hooks_uri', '')
84 extras['hooks_uri'] = getattr(callback_daemon, 'hooks_uri', '')
80 extras['task_queue'] = getattr(callback_daemon, 'task_queue', '')
85 extras['task_queue'] = getattr(callback_daemon, 'task_queue', '')
81 extras['task_backend'] = getattr(callback_daemon, 'task_backend', '')
86 extras['task_backend'] = getattr(callback_daemon, 'task_backend', '')
82 extras['hooks_protocol'] = protocol
87 extras['hooks_protocol'] = protocol
83 extras['time'] = time.time()
88 extras['time'] = time.time()
84
89
85 # register txn_id
90 # register txn_id
86 extras['txn_id'] = txn_id
91 extras['txn_id'] = txn_id
87 log.debug('Prepared a callback daemon: %s',
92 log.debug('Prepared a callback daemon: %s',
88 callback_daemon.__class__.__name__)
93 callback_daemon.__class__.__name__)
89 return callback_daemon, extras
94 return callback_daemon, extras
@@ -1,33 +1,35 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from rhodecode.lib.hook_daemon.base import BaseHooksCallbackDaemon
19 from rhodecode.lib.hook_daemon.base import BaseHooksCallbackDaemon
20
20
21
21
class CeleryHooksCallbackDaemon(BaseHooksCallbackDaemon):
    """
    Context manager for achieving compatibility with the celery backend.

    Carries the celery connection settings the hooks need to publish tasks;
    no local daemon process is started.
    """

    def __init__(self, task_queue, task_backend):
        # broker url (`celery.broker_url`) hook tasks are published to
        self.task_queue = task_queue
        # result backend (`celery.result_backend`) used to read task results
        self.task_backend = task_backend

    # NOTE: the original file defined __repr__ twice; the duplicate
    # definition was dead code and has been removed.
    def __repr__(self):
        return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})'
@@ -1,1750 +1,1750 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import collections
20 import collections
21 import datetime
21 import datetime
22 import os
22 import os
23 import re
23 import re
24 import pprint
24 import pprint
25 import shutil
25 import shutil
26 import socket
26 import socket
27 import subprocess
27 import subprocess
28 import time
28 import time
29 import uuid
29 import uuid
30 import dateutil.tz
30 import dateutil.tz
31 import logging
31 import logging
32 import functools
32 import functools
33
33
34 import mock
34 import mock
35 import pyramid.testing
35 import pyramid.testing
36 import pytest
36 import pytest
37 import colander
37 import colander
38 import requests
38 import requests
39 import pyramid.paster
39 import pyramid.paster
40
40
41 import rhodecode
41 import rhodecode
42 import rhodecode.lib
42 import rhodecode.lib
43 from rhodecode.model.changeset_status import ChangesetStatusModel
43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.comment import CommentsModel
44 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.db import (
45 from rhodecode.model.db import (
46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 from rhodecode.model.meta import Session
48 from rhodecode.model.meta import Session
49 from rhodecode.model.pull_request import PullRequestModel
49 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.repo import RepoModel
50 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo_group import RepoGroupModel
51 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.user import UserModel
52 from rhodecode.model.user import UserModel
53 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.user_group import UserGroupModel
54 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.integration import IntegrationModel
55 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.integrations import integration_type_registry
56 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations.types.base import IntegrationTypeBase
57 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.lib.utils import repo2db_mapper
58 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.str_utils import safe_bytes
59 from rhodecode.lib.str_utils import safe_bytes
60 from rhodecode.lib.hash_utils import sha1_safe
60 from rhodecode.lib.hash_utils import sha1_safe
61 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.lib.vcs.nodes import FileNode
63 from rhodecode.tests import (
63 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 TEST_USER_REGULAR_PASS)
66 TEST_USER_REGULAR_PASS)
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 from rhodecode.tests.fixture import Fixture
68 from rhodecode.tests.fixture import Fixture
69 from rhodecode.config import utils as config_utils
69 from rhodecode.config import utils as config_utils
70
70
71 log = logging.getLogger(__name__)
71 log = logging.getLogger(__name__)
72
72
73
73
def cmp(a, b):
    """Three-way comparison (python2 ``cmp`` backport): -1, 0 or 1."""
    # boolean subtraction maps (a > b, a < b) to 1/0/-1 without branching;
    # kept as-is so unordered values (e.g. NaN) yield 0 exactly like python2
    return (a > b) - (a < b)
77
77
78
78
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.config import rcextensions

    previous = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = rcextensions
    # fresh call-recorder; entry points append into this mapping
    rhodecode.EXTENSIONS.calls = collections.defaultdict(list)

    def cleanup():
        # restore whatever extensions module was active before the session
        rhodecode.EXTENSIONS = previous

    request.addfinalizer(cleanup)
93
93
94
94
@pytest.fixture()
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # The mapping is empty at this point; the hooks fill it during the test
    # run, and because we hand out a live reference the caller observes
    # those updates.
    return calls
106
106
107
107
@pytest.fixture(scope='session')
def http_environ_session():
    """
    Session-scoped variant of the "http_environ" fixture.
    """
    return plain_http_environ()
114
114
115
115
def plain_http_host_stub():
    """
    host:port value used as HTTP_HOST throughout the test run.
    """
    return 'example.com:80'
121
121
122
122
@pytest.fixture()
def http_host_stub():
    """
    Fixture wrapper exposing the stub HTTP_HOST value of the test run.
    """
    return plain_http_host_stub()
129
129
130
130
def plain_http_host_only_stub():
    """
    Hostname part (no port) of the stub HTTP_HOST value.
    """
    host, _, _port = plain_http_host_stub().partition(':')
    return host
136
136
137
137
@pytest.fixture()
def http_host_only_stub():
    """
    Fixture wrapper exposing the port-less stub HTTP_HOST value.
    """
    return plain_http_host_only_stub()
144
144
145
145
def plain_http_environ():
    """
    HTTP extra environ keys.

    Used by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    host_with_port = plain_http_host_stub()
    return {
        'SERVER_NAME': plain_http_host_only_stub(),
        'SERVER_PORT': host_with_port.split(':')[1],
        'HTTP_HOST': host_with_port,
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET'
    }
161
161
162
162
@pytest.fixture()
def http_environ():
    """
    HTTP extra environ keys, function-scoped so individual tests can
    override them.
    """
    return plain_http_environ()
173
173
174
174
@pytest.fixture(scope='session')
def baseapp(ini_config, vcsserver, http_environ_session):
    """
    Session-wide pyramid application built from the test ini file.
    """
    from rhodecode.lib.config_utils import get_app_config
    from rhodecode.config.middleware import make_pyramid_app

    log.info("Using the RhodeCode configuration:%s", ini_config)
    pyramid.paster.setup_logging(ini_config)

    conf_settings = get_app_config(ini_config)
    return make_pyramid_app({'__file__': ini_config}, **conf_settings)
187
187
188
188
@pytest.fixture(scope='function')
def app(request, config_stub, baseapp, http_environ):
    """
    Function-scoped WSGI test app wrapping the session ``baseapp``.
    """
    test_app = CustomTestApp(
        baseapp,
        extra_environ=http_environ)
    if request.cls:
        # mirror onto the class for class-based tests that read self.app
        request.cls.app = test_app
    return test_app
197
197
198
198
@pytest.fixture(scope='session')
def app_settings(baseapp, ini_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    return baseapp.config.get_settings()
208
208
209
209
@pytest.fixture(scope='session')
def db_connection(ini_settings):
    """Initialize the database connection for the test session."""
    config_utils.initialize_database(ini_settings)
214
214
215
215
216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
217
217
218
218
def _autologin_user(app, *args):
    """Log a user in through the UI and return their LoginData."""
    ui_session = login_user_session(app, *args)
    token = rhodecode.lib.auth.get_csrf_token(ui_session)
    return LoginData(token, ui_session['rhodecode_user'])
223
223
224
224
@pytest.fixture()
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    return _autologin_user(app)
231
231
232
232
@pytest.fixture()
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
240
240
241
241
@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    """CSRF token of the auto-logged-in admin user."""
    return autologin_user.csrf_token
245
245
246
246
@pytest.fixture(scope='function')
def xhr_header(request):
    """Extra environ marking a request as XHR, like browsers do for ajax."""
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
250
250
251
251
@pytest.fixture()
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)
261
261
262
262
@pytest.fixture(scope='class')
def index_location(request, baseapp):
    """Search index location from app settings, also mirrored onto the class."""
    location = baseapp.config.get_settings()['search.location']
    if request.cls:
        request.cls.index_location = location
    return location
269
269
270
270
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.
    """
    # exist_ok: directory may survive from a prior --keep-tmp-path run
    os.makedirs(TESTS_TMP_PATH, exist_ok=True)

    if not request.config.getoption('--keep-tmp-path'):
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

        request.addfinalizer(remove_tmp_path)

    return TESTS_TMP_PATH
285
285
286
286
@pytest.fixture()
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    # timestamp-derived id keeps parallel/repeated runs from colliding
    stamp = str(time.time()).replace('.', '')
    repogroupid = f'test_repo_group_{stamp}'
    repo_group = fixture.create_repo_group(repogroupid)

    def _cleanup():
        fixture.destroy_repo_group(repogroupid)

    request.addfinalizer(_cleanup)
    return repo_group
302
302
303
303
@pytest.fixture()
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    # timestamp-derived id keeps parallel/repeated runs from colliding
    stamp = str(time.time()).replace('.', '')
    usergroupid = f'test_user_group_{stamp}'
    user_group = fixture.create_user_group(usergroupid)

    def _cleanup():
        fixture.destroy_user_group(user_group)

    request.addfinalizer(_cleanup)
    return user_group
319
319
320
320
@pytest.fixture(scope='session')
def test_repo(request):
    """Session-wide container handing out read-only test repositories."""
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container
326
326
327
327
class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    # per-backend callables that unpack a repository dump onto disk
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []       # repo names to destroy on teardown
        self._fixture = Fixture()
        self._repos = {}               # (dump_name, backend_alias) -> repo_id

    def __call__(self, dump_name, backend_alias, config=None):
        """Return the cached repository for this dump/backend, creating it once."""
        cache_key = (dump_name, backend_alias)
        if cache_key not in self._repos:
            created = self._create_repo(dump_name, backend_alias, config)
            # store the id, not the object: DB sessions come and go
            self._repos[cache_key] = created.repo_id
        return Repository.get(self._repos[cache_key])

    def _create_repo(self, dump_name, backend_alias, config):
        # extract the dump onto disk and register the result in the database
        repo_name = f'{backend_alias}-{dump_name}'
        backend = get_backend(backend_alias)
        extract = self.dump_extractors[backend_alias]
        repo_path = extract(dump_name, repo_name)

        vcs_repo = backend(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        db_repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return db_repo

    def _cleanup(self):
        # destroy in reverse creation order
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
376
376
377
377
def backend_base(request, backend_alias, baseapp, test_repo):
    """
    Build a :class:`Backend` bound to `backend_alias` for the current test.

    Skips the test when the backend is not enabled via the ``--backends``
    command line option, and honours the ``xfail_backends`` and
    ``skip_backends`` markers. The backend's cleanup is registered as a
    test finalizer.
    """
    if backend_alias not in request.config.getoption('--backends'):
        # f-string for consistency with vcsbackend_base; message unchanged.
        pytest.skip(f"Backend {backend_alias} not selected.")

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = f'vcs_test_{backend_alias}'
    backend = Backend(
        alias=backend_alias,
        repo_name=repo_name,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
393
393
394
394
@pytest.fixture()
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)
408
408
409
409
@pytest.fixture()
def backend_git(request, baseapp, test_repo):
    """Git-only variant of the `backend` fixture."""
    return backend_base(request, 'git', baseapp, test_repo)
413
413
414
414
@pytest.fixture()
def backend_hg(request, baseapp, test_repo):
    """Mercurial-only variant of the `backend` fixture."""
    return backend_base(request, 'hg', baseapp, test_repo)
418
418
419
419
@pytest.fixture()
def backend_svn(request, baseapp, test_repo):
    """Subversion-only variant of the `backend` fixture."""
    return backend_base(request, 'svn', baseapp, test_repo)
423
423
424
424
@pytest.fixture()
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend".

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git
440
440
441
441
@pytest.fixture()
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git
451
451
452
452
@pytest.fixture()
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()
459
459
460
460
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    # Any run of characters outside [0-9a-zA-Z] is collapsed to '_' when
    # deriving repo names from test names.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    # NOTE(review): class-level defaults shadowed by instance assignments in
    # create_master_repo/_add_commits_to_repo; _commit_ids is a mutable
    # class attribute shared until the first assignment — confirm intended.
    _master_repo = None
    _master_repo_path = ''
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        """Return the shared test repository `key` for this backend alias."""
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        """Like `__getitem__`, but allows passing a vcs `config`."""
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo_id(self):
        # just fake some repo_id
        return self.repo.repo_id

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        """Default branch name of the underlying vcs backend class."""
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Mapping of commit message -> raw_id for the last created repository.
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        self._master_repo_path = self._master_repo.repo_full_path

        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix='', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': f'Commit {x} of {self.repo_name}'}
            for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        # NOTE(review): count() result is discarded — presumably forces the
        # scm instance to initialize; confirm.
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads, do_fetch=False):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsrepo = repo.scm_instance()
        # Disable hooks so the pull does not trigger side effects.
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        if do_fetch:
            vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
        vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)

    def create_fork(self):
        """Fork the current repo; the fork becomes the "current" repo."""
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=''):
        """Reserve and return a fresh repo name (registered for cleanup)."""
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Derive a unique name from the test name plus a running counter.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content=b'Test content\n'):
        """Commit `filename` with `content` into the current repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        """Flip the enable_downloads flag on the current repo and commit."""
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        """Destroy all repositories created by this backend, newest first."""
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        # Delegates to the module-level helper; remembers the resulting
        # message -> raw_id map for later lookups (e.g. pull_heads).
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                cleanup_message = message.replace(' ', '')
                ref_name = f'refs/test-refs/{cleanup_message}'
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name, ref_val in refs.items():
            repo.set_refs(ref_name, ref_val)
641
641
642
642
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    # Any run of characters outside [0-9a-zA-Z] is collapsed to '_' when
    # deriving repo names from test names.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        """Return the scm instance of the shared test repository `key`."""
        return self._test_repo_container(key, self.alias).scm_instance()

    def __repr__(self):
        return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        """
        Create a plain vcs repository (no DB record) and track it for cleanup.

        :param commits: optional sequence of commit dicts to add.
        :param number_of_commits: number of filler commits when `commits`
            is not given.
        :param _clone_repo: optional repo to clone from.
        :param bare: create a bare repository (no checkout).
        """
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in range(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        """Create a new repository cloned from `repo`."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        """Remove all repositories created by this backend from disk."""
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Reserve a fresh on-disk path and make it the "current" one."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Derive a unique name from the test name plus a running counter.
        return "{}_{}".format(
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos)
        )

    def add_file(self, repo, filename, content='Test content\n'):
        """Commit `filename` with `content` into `repo`."""
        imc = repo.in_memory_commit
        imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
        imc.commit(
            message='Automatic commit from vcsbackend fixture',
            author='Automatic <automatic@rhodecode.com>')

    def ensure_file(self, filename, content='Test content\n'):
        """Commit a file into the current repo (never a shared vcs_test one)."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
725
725
726
726
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
    """
    Build a :class:`VcsBackend` for `backend_alias` for the current test.

    Skips the test when the backend is not enabled via ``--backends`` and
    honours the ``xfail_backends`` / ``skip_backends`` markers. Cleanup of
    created repositories is registered as a test finalizer.
    """
    selected_backends = request.config.getoption('--backends')
    if backend_alias not in selected_backends:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    vcs_backend = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, f'vcs_test_{backend_alias}'),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(vcs_backend.cleanup)
    return vcs_backend
743
743
744
744
@pytest.fixture()
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
757
757
758
758
@pytest.fixture()
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    """Git-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
762
762
763
763
@pytest.fixture()
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    """Mercurial-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
767
767
768
768
@pytest.fixture()
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    """Subversion-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
772
772
773
773
@pytest.fixture()
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
782
782
783
783
def _add_commits_to_repo(vcs_repo, commits):
    """
    Apply `commits` to `vcs_repo` via its in-memory commit interface.

    Each entry of `commits` is a dict supporting the keys ``message``,
    ``added``, ``changed``, ``removed``, ``parents``, ``author``, ``date``
    and ``branch``. Parents are referenced by the *message* of an earlier
    commit in the same sequence.

    :return: dict mapping commit message -> raw_id; empty when `commits`
        is falsy.
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit

    for idx, commit in enumerate(commits):
        message = str(commit.get('message', f'Commit {idx}'))

        for node in commit.get('added', []):
            imc.add(FileNode(safe_bytes(node.path), content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(safe_bytes(node.path), content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(safe_bytes(node.path)))

        # Resolve parent references from messages of previously created
        # commits in this same sequence.
        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        # A commit with no file operations would be empty; add a filler file
        # so each entry still produces a commit.
        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))

        commit = imc.commit(
            message=message,
            author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids
819
819
820
820
@pytest.fixture()
def reposerver(request):
    """
    Allows to serve a backend repository
    """

    repo_server = RepoServer()
    # Terminate any spawned server processes when the test ends.
    request.addfinalizer(repo_server.cleanup)
    return repo_server
830
830
831
831
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL under which the last served repository is reachable.
    url = None

    def __init__(self):
        # Server processes spawned by serve(); terminated in cleanup().
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Start an svnserve daemon rooted at `vcsrepo` and set `self.url`."""
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        command = [
            'svnserve', '-d', '--foreground',
            '--listen-host', 'localhost',
            '--root', vcsrepo.path,
        ]
        server_process = subprocess.Popen(command)
        self._cleanup_servers.append(server_process)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate every server process started via serve()."""
        for server_process in self._cleanup_servers:
            server_process.terminate()
857
857
858
858
@pytest.fixture()
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """

    util = PRTestUtility(backend)
    # Remove the pull request and related patches/state after the test.
    request.addfinalizer(util.cleanup)

    return util
874
874
875
875
class PRTestUtility(object):
    """
    Helper around a single pull request for model and functional tests.

    Instances are handed out by the `pr_util` fixture. The utility lazily
    creates one pull request (together with its source and target
    repositories) via :meth:`create_pull_request` and then offers helpers
    to approve, comment on, update, version and finally clean up that
    pull request.
    """

    # Lazily-populated state; filled in by create_pull_request() and
    # set_mergeable().  `commit_ids` maps commit message -> commit id as
    # returned by backend.create_master_repo().
    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None
    commit_ids: dict

    def __init__(self, backend):
        # `backend` is the parametrized test-backend helper (provides
        # create_master_repo, create_repo, pull_heads, default branch name).
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix='', reviewers=None, observers=None,
            title="Test", description="Description"):
        """
        Create (at most once) the pull request managed by this utility.

        If no `commits` are given a default three-commit history is used
        with 'c2' proposed on top of 'c1'.  `mergeable` toggles the mocked
        VCS setting `rhodecode_pr_merge_enabled`; `enable_notifications`
        set to False patches out NotificationModel.create.  Repeated calls
        return the already-created pull request unchanged.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                # Default fixture history: three commits; the PR proposes
                # 'c2' (source) against 'c1' (target).
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'observers': observers or self._get_observers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Cast an 'approved' status vote for every reviewer of the PR."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the managed pull request as its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message, branch: str = None) -> str:
        """
        Build a 'branch:<name>:<commit_id>' reference string.

        `commit_message` is the key into `self.commit_ids` (e.g. 'c1');
        `branch` defaults to the backend's default branch name.
        """
        default_branch = branch or self.backend.default_branch_name
        message = self.commit_ids[commit_message]
        reference = f'branch:{default_branch}:{message}'

        return reference

    def _get_reviewers(self):
        # Two regular test users acting as reviewers; tuple layout:
        # (username, reasons, mandatory, role, rules).
        role = PullRequestReviewers.ROLE_REVIEWER
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
        ]

    def _get_observers(self):
        # No observers by default.
        return [

        ]

    def update_source_repository(self, head=None, do_fetch=False):
        """Pull `head` (default 'c3') into the PR's source repository."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch)

    def update_target_repository(self, head=None, do_fetch=False):
        """Pull `head` (default 'c3') into the PR's target repository."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch)

    def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
        """Overwrite the PR's target_ref with '<type>:<name>:<commit_id>' and return it."""
        full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
        self.pull_request.target_ref = full_ref
        return full_ref

    def set_pr_source_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
        """Overwrite the PR's source_ref with '<type>:<name>:<commit_id>' and return it."""
        full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
        self.pull_request.source_ref = full_ref
        return full_ref

    def add_one_commit(self, head=None):
        """
        Add one commit to the source repo, update the PR and return the
        id of the newly added revision.  Asserts exactly one was added.
        """
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """
        Strip the tip commit from the source repo, update the PR and
        return the removed commit id.  Requires the PR to currently hold
        exactly two revisions.
        """
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """
        Create a general (non-inline) comment on the PR by its author.

        If `linked_to` is given (presumably a pull request version —
        verify against callers) the comment is linked to that version.
        """
        comment = CommentsModel().create(
            text="Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no='n1', file_path='file_1'):
        """
        Create an inline comment on `file_path` at `line_no`; otherwise
        behaves like :meth:`create_comment`.
        """
        comment = CommentsModel().create(
            text="Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the PR (default parameters) and snapshot it as a version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set `status` on the PR's target repo for each given reviewer."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """
        Mock VcsSettingsModel.get_general_settings so that the PR merge
        feature flag `rhodecode_pr_merge_enabled` reports `value`.
        The patcher is started once and reused on later calls.
        """
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the created pull request (if any) and stop active patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1080
1080
1081
1081
@pytest.fixture()
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1089
1089
1090
1090
@pytest.fixture()
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1098
1098
1099
1099
@pytest.fixture()
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.

    The utility is named after the current test node and its `cleanup`
    method is registered as a pytest finalizer.
    """
    helper = UserUtility(test_name=request.node.name)
    request.addfinalizer(helper.cleanup)
    return helper
1108
1108
1109
1109
1110 # TODO: johbo: Split this up into utilities per domain or something similar
1110 # TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Factory/cleanup helper for users, user groups, repos, repo groups and
    permissions used by tests via the `user_util` fixture.

    Every `create_*` call (with `auto_cleanup=True`) records the created
    object's id; `cleanup()` destroys everything in reverse dependency
    order: permissions, repos, repo groups, user groups, users.
    """

    def __init__(self, test_name="test"):
        # `test_name` (usually request.node.name) is used as a prefix for
        # all generated object names; brackets from pytest parametrization
        # are sanitized out.
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # Bookkeeping lists of ids / id-tuples for cleanup().
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        # '[' and ']' appear in parametrized test node names and are not
        # safe in generated object names; replace them with '_'.
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a uniquely named repo group owned by `owner`."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        """Create a uniquely named repository, optionally inside `parent` group."""
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a uniquely named user; extra kwargs go to the fixture."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        """Attach an extra email address to `user` (no auto-cleanup tracking)."""
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        """Create a user plus a user group containing that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        """Create a uniquely named user group and optionally add `members`."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        """
        Record a global permission grant for cleanup and detach the user
        from default permissions.  NOTE(review): the grant itself is only
        recorded here — presumably the caller performs it; verify.
        """
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        """Grant `user` a permission on `repo_group`; tracked for cleanup."""
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        """Grant `user_group` a permission on `repo_group`; tracked for cleanup."""
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        """Grant `user` a permission on `repo`; tracked for cleanup."""
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        """Grant `user_group` a permission on `repo`; tracked for cleanup."""
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        """Grant `user` a permission on `target_user_group`; tracked for cleanup."""
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        """Grant `user_group` a permission on `target_user_group`; tracked for cleanup."""
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        """Revoke a global permission and re-enable default-permission inheritance."""
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        """Set and commit the `inherit_default_permissions` flag for `user_name`."""
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        """Destroy everything created through this utility, dependencies first."""
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        # Revoke every tracked permission grant, per target type.
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            # NOTE(review): relies on a `cmp` helper being in scope — the
            # builtin was removed in Python 3; presumably imported/defined
            # elsewhere in this module, verify.
            return cmp(second_group_parts, first_group_parts)

        # Destroy deepest-nested groups first so parents are empty.
        sorted_repo_group_ids = sorted(
            self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            # NOTE(review): same `cmp` dependency as _repo_group_compare.
            return cmp(second_group_parts, first_group_parts)

        # Destroy deepest-nested user groups first.
        sorted_user_group_ids = sorted(
            self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1327
1327
1328
1328
@pytest.fixture(scope='session')
def testrun():
    """
    Session-scoped metadata about the current test run: a random UUID,
    the UTC start time in ISO format and an integer epoch timestamp.
    """
    run_info = {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
    }
    run_info['timestamp'] = int(time.time())
    return run_info
1336
1336
1337
1337
class AppenlightClient(object):
    """
    Minimal client that queues test-run statistics and POSTs them to an
    Appenlight endpoint (wire protocol version 0.5).
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        # flags controlling which defaults collect() stamps onto each record
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        # tag -> value snapshots taken before/after the measured code
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []  # records queued until send_stats()
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        # record a "before" snapshot for *tag*
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        # record an "after" snapshot for *tag*
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one record, filling in server/date/namespace/request defaults."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """Flush all queued records to Appenlight; raise if the POST fails."""
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                # best effort: compute a delta only when both snapshots
                # exist and support subtraction
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if not response.status_code == 200:
            # dump the payload and response to aid debugging before failing
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1407
1407
1408
1408
@pytest.fixture()
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    util = GistUtility()
    request.addfinalizer(util.cleanup)
    return util
1417
1417
1418
1418
class GistUtility(object):
    """Tracks gists created during a test and destroys them on cleanup."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist via the fixture and remember its id for cleanup."""
        new_gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(new_gist.gist_id)
        return new_gist

    def cleanup(self):
        """Destroy every gist recorded by `create_gist`."""
        destroy = self.fixture.destroy_gists
        for gist_id in self.gist_ids:
            destroy(str(gist_id))
1432
1432
1433
1433
@pytest.fixture()
def enabled_backends(request):
    """Return a copy of the backends enabled via the ``--backends`` option."""
    return list(request.config.option.backends)
1438
1438
1439
1439
@pytest.fixture()
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    util = SettingsUtility()
    request.addfinalizer(util.cleanup)
    return util
1448
1448
1449
1449
class SettingsUtility(object):
    """
    Creates RhodeCode setting/ui rows for tests and tracks the ids of rows
    created with ``cleanup=True`` so `cleanup` can delete them again.
    """

    def __init__(self):
        # ids of rows to delete in cleanup(), one list per model
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Insert a per-repository ui row; key defaults to a content hash."""
        key = key or sha1_safe(f'{section}{value}{repo.repo_id}')

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Insert a global ui row; key defaults to a content hash."""
        key = key or sha1_safe(f'{section}{value}')

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Insert a per-repository application setting."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Insert a global application setting."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete every tracked row, then commit once at the end."""
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()
1529
1529
1530
1530
@pytest.fixture()
def no_notifications(request):
    """Silence notification creation for the duration of one test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1537
1537
1538
1538
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    # value comes from the custom --repeat pytest command line option
    return request.config.getoption('--repeat')
1548
1548
1549
1549
@pytest.fixture()
def rhodecode_fixtures():
    # fresh Fixture helper instance per test (function-scoped)
    return Fixture()
1553
1553
1554
1554
@pytest.fixture()
def context_stub():
    """
    Stub context object.
    """
    # DummyResource doubles as a bare template/render context in tests
    context = pyramid.testing.DummyResource()
    return context
1562
1562
1563
1563
@pytest.fixture()
def request_stub():
    """
    Stub request object.
    """
    # imported lazily to avoid pulling rhodecode.lib.base at collection time
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1572
1572
1573
1573
@pytest.fixture()
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    # imported lazily to avoid pulling rhodecode.lib.base at collection time
    from rhodecode.lib.base import bootstrap_config
    configurator = bootstrap_config(request=request_stub)

    # tear the pyramid.testing registry down once the test is finished
    request.addfinalizer(pyramid.testing.tearDown)

    return configurator
1587
1587
1588
1588
@pytest.fixture()
def StubIntegrationType():
    """Register and return a minimal integration type used only in tests."""

    class _StubIntegrationType(IntegrationTypeBase):
        """Test integration type class"""

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super().__init__(settings)
            # events are captured here instead of being delivered anywhere
            self.sent_events = []

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )

            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1625
1625
1626
1626
@pytest.fixture()
def stub_integration_settings():
    # payload matching _StubIntegrationType.settings_schema fields
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
1633
1633
1634
1634
@pytest.fixture()
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Repository-scoped stub integration, deleted again after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1648
1648
1649
1649
@pytest.fixture()
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Repo-group stub integration (direct children only); auto-deleted."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1663
1663
1664
1664
@pytest.fixture()
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType, stub_integration_settings):
    """Repo-group stub integration applied recursively; auto-deleted."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1678
1678
1679
1679
@pytest.fixture()
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Globally-scoped stub integration; auto-deleted after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1693
1693
1694
1694
@pytest.fixture()
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Stub integration scoped to root-level repos only; auto-deleted."""
    # NOTE(review): the name duplicates global_integration_stub's
    # 'test global integration' — looks like copy/paste; confirm whether any
    # test asserts on the name before renaming it.
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1708
1708
1709
1709
@pytest.fixture()
def local_dt_to_utc():
    """Factory converting a naive local datetime into a naive UTC datetime."""
    def _factory(dt):
        localized = dt.replace(tzinfo=dateutil.tz.tzlocal())
        return localized.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
    return _factory
1716
1716
1717
1717
@pytest.fixture()
def disable_anonymous_user(request, baseapp):
    """Turn anonymous access off for one test, restoring it afterwards."""
    set_anonymous_access(False)
    request.addfinalizer(lambda: set_anonymous_access(True))
1725
1725
1726
1726
@pytest.fixture(scope='module')
def rc_fixture(request):
    # module-scoped Fixture helper, shared by all tests in one module
    return Fixture()
1730
1730
1731
1731
@pytest.fixture()
def repo_groups(request):
    """Create zombie/parent/child repo groups, destroyed after the test."""
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')

    groups_in_db = session.query(RepoGroup).all()
    assert len(groups_in_db) == 3
    assert child_group.group_parent_id == parent_group.group_id

    @request.addfinalizer
    def _cleanup():
        # destroy children before parents so the group tree stays consistent
        for group in (zombie_group, child_group, parent_group):
            fixture.destroy_repo_group(group)

    return zombie_group, parent_group, child_group
@@ -1,363 +1,364 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import logging
20 import logging
21 import io
21 import io
22
22
23 import mock
23 import mock
24 import msgpack
24 import msgpack
25 import pytest
25 import pytest
26 import tempfile
26 import tempfile
27
27
28 from rhodecode.lib.hook_daemon import http_hooks_deamon
28 from rhodecode.lib.hook_daemon import http_hooks_deamon
29 from rhodecode.lib.hook_daemon import celery_hooks_deamon
29 from rhodecode.lib.hook_daemon import celery_hooks_deamon
30 from rhodecode.lib.hook_daemon import hook_module
30 from rhodecode.lib.hook_daemon import hook_module
31 from rhodecode.lib.hook_daemon import base as hook_base
31 from rhodecode.lib.hook_daemon import base as hook_base
32 from rhodecode.lib.str_utils import safe_bytes
32 from rhodecode.lib.str_utils import safe_bytes
33 from rhodecode.tests.utils import assert_message_in_log
33 from rhodecode.tests.utils import assert_message_in_log
34 from rhodecode.lib.ext_json import json
34 from rhodecode.lib.ext_json import json
35
35
# wire protocol used by all handler tests below (msgpack-based)
test_proto = http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO
37
37
38
38
class TestHooks(object):
    def test_hooks_can_be_used_as_a_context_processor(self):
        """Entering a Hooks context must yield the Hooks instance itself."""
        hooks = hook_module.Hooks()
        with hooks as yielded:
            pass
        assert yielded == hooks
44 assert hooks == return_value
45
45
46
46
47 class TestHooksHttpHandler(object):
47 class TestHooksHttpHandler(object):
    def test_read_request_parses_method_name_and_arguments(self):
        """POSTed payload must be decoded into a method name + extras and
        dispatched to the matching ``Hooks`` method with those extras."""
        data = {
            'method': 'test',
            'extras': {
                'param1': 1,
                'param2': 'a'
            }
        }
        request = self._generate_post_request(data)
        # 'test' does not exist on Hooks, so create=True adds it via mock
        hooks_patcher = mock.patch.object(
            hook_module.Hooks, data['method'], create=True, return_value=1)

        with hooks_patcher as hooks_mock:
            handler = http_hooks_deamon.HooksHttpHandler
            handler.DEFAULT_HOOKS_PROTO = test_proto
            handler.wbufsize = 10240  # large write buffer for the fake server
            MockServer(handler, request)

        hooks_mock.assert_called_once_with(data['extras'])
66 hooks_mock.assert_called_once_with(data['extras'])
67
67
    def test_hooks_serialized_result_is_returned(self):
        """The value returned by a Hooks method must come back serialized
        as the last chunk of the HTTP response stream."""
        request = self._generate_post_request({})
        rpc_method = 'test'
        hook_result = {
            'first': 'one',
            'second': 2
        }
        extras = {}

        # patching our _read to return test method and proto used
        read_patcher = mock.patch.object(
            http_hooks_deamon.HooksHttpHandler, '_read_request',
            return_value=(test_proto, rpc_method, extras))

        # patch Hooks instance to return hook_result data on 'test' call
        hooks_patcher = mock.patch.object(
            hook_module.Hooks, rpc_method, create=True,
            return_value=hook_result)

        with read_patcher, hooks_patcher:
            handler = http_hooks_deamon.HooksHttpHandler
            handler.DEFAULT_HOOKS_PROTO = test_proto
            handler.wbufsize = 10240  # large write buffer for the fake server
            server = MockServer(handler, request)

        expected_result = http_hooks_deamon.HooksHttpHandler.serialize_data(hook_result)

        # rewind the captured response and compare only its final payload line
        server.request.output_stream.seek(0)
        assert server.request.output_stream.readlines()[-1] == expected_result
96 assert server.request.output_stream.readlines()[-1] == expected_result
97
97
    def test_exception_is_returned_in_response(self):
        """An exception raised inside a Hooks method must be serialized back
        to the caller with its type, traceback and args."""
        request = self._generate_post_request({})
        rpc_method = 'test'

        read_patcher = mock.patch.object(
            http_hooks_deamon.HooksHttpHandler, '_read_request',
            return_value=(test_proto, rpc_method, {}))

        # make the dispatched Hooks method blow up
        hooks_patcher = mock.patch.object(
            hook_module.Hooks, rpc_method, create=True,
            side_effect=Exception('Test exception'))

        with read_patcher, hooks_patcher:
            handler = http_hooks_deamon.HooksHttpHandler
            handler.DEFAULT_HOOKS_PROTO = test_proto
            handler.wbufsize = 10240  # large write buffer for the fake server
            server = MockServer(handler, request)

        server.request.output_stream.seek(0)
        data = server.request.output_stream.readlines()
        # NOTE(review): the first 5 lines appear to be status/header output;
        # the msgpack payload follows — slice index is empirical, confirm if
        # the handler's header set changes.
        msgpack_data = b''.join(data[5:])
        org_exc = http_hooks_deamon.HooksHttpHandler.deserialize_data(msgpack_data)
        expected_result = {
            'exception': 'Exception',
            'exception_traceback': org_exc['exception_traceback'],
            'exception_args': ['Test exception']
        }
        assert org_exc == expected_result
126
126
127 def test_log_message_writes_to_debug_log(self, caplog):
127 def test_log_message_writes_to_debug_log(self, caplog):
128 ip_port = ('0.0.0.0', 8888)
128 ip_port = ('0.0.0.0', 8888)
129 handler = http_hooks_deamon.HooksHttpHandler(MockRequest('POST /'), ip_port, mock.Mock())
129 handler = http_hooks_deamon.HooksHttpHandler(MockRequest('POST /'), ip_port, mock.Mock())
130 fake_date = '1/Nov/2015 00:00:00'
130 fake_date = '1/Nov/2015 00:00:00'
131 date_patcher = mock.patch.object(
131 date_patcher = mock.patch.object(
132 handler, 'log_date_time_string', return_value=fake_date)
132 handler, 'log_date_time_string', return_value=fake_date)
133
133
134 with date_patcher, caplog.at_level(logging.DEBUG):
134 with date_patcher, caplog.at_level(logging.DEBUG):
135 handler.log_message('Some message %d, %s', 123, 'string')
135 handler.log_message('Some message %d, %s', 123, 'string')
136
136
137 expected_message = f"HOOKS: client={ip_port} - - [{fake_date}] Some message 123, string"
137 expected_message = f"HOOKS: client={ip_port} - - [{fake_date}] Some message 123, string"
138
138
139 assert_message_in_log(
139 assert_message_in_log(
140 caplog.records, expected_message,
140 caplog.records, expected_message,
141 levelno=logging.DEBUG, module='http_hooks_deamon')
141 levelno=logging.DEBUG, module='http_hooks_deamon')
142
142
143 def _generate_post_request(self, data, proto=test_proto):
143 def _generate_post_request(self, data, proto=test_proto):
144 if proto == http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO:
144 if proto == http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO:
145 payload = msgpack.packb(data)
145 payload = msgpack.packb(data)
146 else:
146 else:
147 payload = json.dumps(data)
147 payload = json.dumps(data)
148
148
149 return b'POST / HTTP/1.0\nContent-Length: %d\n\n%b' % (
149 return b'POST / HTTP/1.0\nContent-Length: %d\n\n%b' % (
150 len(payload), payload)
150 len(payload), payload)
151
151
152
152
153 class ThreadedHookCallbackDaemon(object):
153 class ThreadedHookCallbackDaemon(object):
154 def test_constructor_calls_prepare(self):
154 def test_constructor_calls_prepare(self):
155 prepare_daemon_patcher = mock.patch.object(
155 prepare_daemon_patcher = mock.patch.object(
156 http_hooks_deamon.ThreadedHookCallbackDaemon, '_prepare')
156 http_hooks_deamon.ThreadedHookCallbackDaemon, '_prepare')
157 with prepare_daemon_patcher as prepare_daemon_mock:
157 with prepare_daemon_patcher as prepare_daemon_mock:
158 http_hooks_deamon.ThreadedHookCallbackDaemon()
158 http_hooks_deamon.ThreadedHookCallbackDaemon()
159 prepare_daemon_mock.assert_called_once_with()
159 prepare_daemon_mock.assert_called_once_with()
160
160
161 def test_run_is_called_on_context_start(self):
161 def test_run_is_called_on_context_start(self):
162 patchers = mock.patch.multiple(
162 patchers = mock.patch.multiple(
163 http_hooks_deamon.ThreadedHookCallbackDaemon,
163 http_hooks_deamon.ThreadedHookCallbackDaemon,
164 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
164 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
165
165
166 with patchers as mocks:
166 with patchers as mocks:
167 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
167 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
168 with daemon as daemon_context:
168 with daemon as daemon_context:
169 pass
169 pass
170 mocks['_run'].assert_called_once_with()
170 mocks['_run'].assert_called_once_with()
171 assert daemon_context == daemon
171 assert daemon_context == daemon
172
172
173 def test_stop_is_called_on_context_exit(self):
173 def test_stop_is_called_on_context_exit(self):
174 patchers = mock.patch.multiple(
174 patchers = mock.patch.multiple(
175 http_hooks_deamon.ThreadedHookCallbackDaemon,
175 http_hooks_deamon.ThreadedHookCallbackDaemon,
176 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
176 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
177
177
178 with patchers as mocks:
178 with patchers as mocks:
179 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
179 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
180 with daemon as daemon_context:
180 with daemon as daemon_context:
181 assert mocks['_stop'].call_count == 0
181 assert mocks['_stop'].call_count == 0
182
182
183 mocks['_stop'].assert_called_once_with()
183 mocks['_stop'].assert_called_once_with()
184 assert daemon_context == daemon
184 assert daemon_context == daemon
185
185
186
186
187 class TestHttpHooksCallbackDaemon(object):
187 class TestHttpHooksCallbackDaemon(object):
188 def test_hooks_callback_generates_new_port(self, caplog):
188 def test_hooks_callback_generates_new_port(self, caplog):
189 with caplog.at_level(logging.DEBUG):
189 with caplog.at_level(logging.DEBUG):
190 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
190 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
191 assert daemon._daemon.server_address == ('127.0.0.1', 8881)
191 assert daemon._daemon.server_address == ('127.0.0.1', 8881)
192
192
193 with caplog.at_level(logging.DEBUG):
193 with caplog.at_level(logging.DEBUG):
194 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host=None, port=None)
194 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host=None, port=None)
195 assert daemon._daemon.server_address[1] in range(0, 66000)
195 assert daemon._daemon.server_address[1] in range(0, 66000)
196 assert daemon._daemon.server_address[0] != '127.0.0.1'
196 assert daemon._daemon.server_address[0] != '127.0.0.1'
197
197
198 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
198 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
199 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
199 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
200 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
200 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
201 assert daemon._daemon == tcp_server
201 assert daemon._daemon == tcp_server
202
202
203 _, port = tcp_server.server_address
203 _, port = tcp_server.server_address
204
204
205 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
205 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
206 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
206 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
207 assert_message_in_log(
207 assert_message_in_log(
208 caplog.records, msg, levelno=logging.DEBUG, module='http_hooks_deamon')
208 caplog.records, msg, levelno=logging.DEBUG, module='http_hooks_deamon')
209
209
210 def test_prepare_inits_hooks_uri_and_logs_it(
210 def test_prepare_inits_hooks_uri_and_logs_it(
211 self, tcp_server, caplog):
211 self, tcp_server, caplog):
212 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
212 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
213 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
213 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
214
214
215 _, port = tcp_server.server_address
215 _, port = tcp_server.server_address
216 expected_uri = '{}:{}'.format('127.0.0.1', port)
216 expected_uri = '{}:{}'.format('127.0.0.1', port)
217 assert daemon.hooks_uri == expected_uri
217 assert daemon.hooks_uri == expected_uri
218
218
219 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
219 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
220 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
220 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
221
221
222 assert_message_in_log(
222 assert_message_in_log(
223 caplog.records, msg,
223 caplog.records, msg,
224 levelno=logging.DEBUG, module='http_hooks_deamon')
224 levelno=logging.DEBUG, module='http_hooks_deamon')
225
225
226 def test_run_creates_a_thread(self, tcp_server):
226 def test_run_creates_a_thread(self, tcp_server):
227 thread = mock.Mock()
227 thread = mock.Mock()
228
228
229 with self._tcp_patcher(tcp_server):
229 with self._tcp_patcher(tcp_server):
230 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
230 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
231
231
232 with self._thread_patcher(thread) as thread_mock:
232 with self._thread_patcher(thread) as thread_mock:
233 daemon._run()
233 daemon._run()
234
234
235 thread_mock.assert_called_once_with(
235 thread_mock.assert_called_once_with(
236 target=tcp_server.serve_forever,
236 target=tcp_server.serve_forever,
237 kwargs={'poll_interval': daemon.POLL_INTERVAL})
237 kwargs={'poll_interval': daemon.POLL_INTERVAL})
238 assert thread.daemon is True
238 assert thread.daemon is True
239 thread.start.assert_called_once_with()
239 thread.start.assert_called_once_with()
240
240
241 def test_run_logs(self, tcp_server, caplog):
241 def test_run_logs(self, tcp_server, caplog):
242
242
243 with self._tcp_patcher(tcp_server):
243 with self._tcp_patcher(tcp_server):
244 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
244 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
245
245
246 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
246 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
247 daemon._run()
247 daemon._run()
248
248
249 assert_message_in_log(
249 assert_message_in_log(
250 caplog.records,
250 caplog.records,
251 'Running thread-based loop of callback daemon in background',
251 'Running thread-based loop of callback daemon in background',
252 levelno=logging.DEBUG, module='http_hooks_deamon')
252 levelno=logging.DEBUG, module='http_hooks_deamon')
253
253
254 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
254 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
255 thread = mock.Mock()
255 thread = mock.Mock()
256
256
257 with self._tcp_patcher(tcp_server):
257 with self._tcp_patcher(tcp_server):
258 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
258 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
259
259
260 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
260 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
261 with daemon:
261 with daemon:
262 assert daemon._daemon == tcp_server
262 assert daemon._daemon == tcp_server
263 assert daemon._callback_thread == thread
263 assert daemon._callback_thread == thread
264
264
265 assert daemon._daemon is None
265 assert daemon._daemon is None
266 assert daemon._callback_thread is None
266 assert daemon._callback_thread is None
267 tcp_server.shutdown.assert_called_with()
267 tcp_server.shutdown.assert_called_with()
268 thread.join.assert_called_once_with()
268 thread.join.assert_called_once_with()
269
269
270 assert_message_in_log(
270 assert_message_in_log(
271 caplog.records, 'Waiting for background thread to finish.',
271 caplog.records, 'Waiting for background thread to finish.',
272 levelno=logging.DEBUG, module='http_hooks_deamon')
272 levelno=logging.DEBUG, module='http_hooks_deamon')
273
273
274 def _tcp_patcher(self, tcp_server):
274 def _tcp_patcher(self, tcp_server):
275 return mock.patch.object(
275 return mock.patch.object(
276 http_hooks_deamon, 'TCPServer', return_value=tcp_server)
276 http_hooks_deamon, 'TCPServer', return_value=tcp_server)
277
277
278 def _thread_patcher(self, thread):
278 def _thread_patcher(self, thread):
279 return mock.patch.object(
279 return mock.patch.object(
280 http_hooks_deamon.threading, 'Thread', return_value=thread)
280 http_hooks_deamon.threading, 'Thread', return_value=thread)
281
281
282
282
283 class TestPrepareHooksDaemon(object):
283 class TestPrepareHooksDaemon(object):
284
284
285 @pytest.mark.parametrize('protocol', ('celery',))
285 @pytest.mark.parametrize('protocol', ('celery',))
286 def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified(
286 def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified(
287 self, protocol):
287 self, protocol):
288 with tempfile.NamedTemporaryFile(mode='w') as temp_file:
288 with tempfile.NamedTemporaryFile(mode='w') as temp_file:
289 temp_file.write("[app:main]\ncelery.broker_url = redis://redis/0\n"
289 temp_file.write("[app:main]\ncelery.broker_url = redis://redis/0\n"
290 "celery.result_backend = redis://redis/0")
290 "celery.result_backend = redis://redis/0")
291 temp_file.flush()
291 temp_file.flush()
292 expected_extras = {'config': temp_file.name}
292 expected_extras = {'config': temp_file.name}
293 callback, extras = hook_base.prepare_callback_daemon(
293 callback, extras = hook_base.prepare_callback_daemon(
294 expected_extras, protocol=protocol, host='')
294 expected_extras, protocol=protocol, host='')
295 assert isinstance(callback, celery_hooks_deamon.CeleryHooksCallbackDaemon)
295 assert isinstance(callback, celery_hooks_deamon.CeleryHooksCallbackDaemon)
296
296
297 @pytest.mark.parametrize('protocol, expected_class', (
297 @pytest.mark.parametrize('protocol, expected_class', (
298 ('http', http_hooks_deamon.HttpHooksCallbackDaemon),
298 ('http', http_hooks_deamon.HttpHooksCallbackDaemon),
299 ))
299 ))
300 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
300 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
301 self, protocol, expected_class):
301 self, protocol, expected_class):
302 expected_extras = {
302 expected_extras = {
303 'extra1': 'value1',
303 'extra1': 'value1',
304 'txn_id': 'txnid2',
304 'txn_id': 'txnid2',
305 'hooks_protocol': protocol.lower(),
305 'hooks_protocol': protocol.lower(),
306 'task_backend': '',
306 'task_backend': '',
307 'task_queue': '',
307 'task_queue': '',
308 'repo_store': '/var/opt/rhodecode_repo_store',
308 'repo_store': '/var/opt/rhodecode_repo_store',
309 'repository': 'rhodecode',
309 'repository': 'rhodecode',
310 }
310 }
311 from rhodecode import CONFIG
311 from rhodecode import CONFIG
312 CONFIG['vcs.svn.redis_conn'] = 'redis://redis:6379/0'
312 CONFIG['vcs.svn.redis_conn'] = 'redis://redis:6379/0'
313 callback, extras = hook_base.prepare_callback_daemon(
313 callback, extras = hook_base.prepare_callback_daemon(
314 expected_extras.copy(), protocol=protocol, host='127.0.0.1',
314 expected_extras.copy(), protocol=protocol, host='127.0.0.1',
315 txn_id='txnid2')
315 txn_id='txnid2')
316 assert isinstance(callback, expected_class)
316 assert isinstance(callback, expected_class)
317 extras.pop('hooks_uri')
317 extras.pop('hooks_uri')
318 expected_extras['time'] = extras['time']
318 expected_extras['time'] = extras['time']
319 assert extras == expected_extras
319 assert extras == expected_extras
320
320
321 @pytest.mark.parametrize('protocol', (
321 @pytest.mark.parametrize('protocol', (
322 'invalid',
322 'invalid',
323 'Http',
323 'Http',
324 'HTTP',
324 'HTTP',
325 'celerY'
325 ))
326 ))
326 def test_raises_on_invalid_protocol(self, protocol):
327 def test_raises_on_invalid_protocol(self, protocol):
327 expected_extras = {
328 expected_extras = {
328 'extra1': 'value1',
329 'extra1': 'value1',
329 'hooks_protocol': protocol.lower()
330 'hooks_protocol': protocol.lower()
330 }
331 }
331 with pytest.raises(Exception):
332 with pytest.raises(Exception):
332 callback, extras = hook_base.prepare_callback_daemon(
333 callback, extras = hook_base.prepare_callback_daemon(
333 expected_extras.copy(),
334 expected_extras.copy(),
334 protocol=protocol, host='127.0.0.1')
335 protocol=protocol, host='127.0.0.1')
335
336
336
337
337 class MockRequest(object):
338 class MockRequest(object):
338
339
339 def __init__(self, request):
340 def __init__(self, request):
340 self.request = request
341 self.request = request
341 self.input_stream = io.BytesIO(safe_bytes(self.request))
342 self.input_stream = io.BytesIO(safe_bytes(self.request))
342 self.output_stream = io.BytesIO() # make it un-closable for testing invesitagion
343 self.output_stream = io.BytesIO() # make it un-closable for testing invesitagion
343 self.output_stream.close = lambda: None
344 self.output_stream.close = lambda: None
344
345
345 def makefile(self, mode, *args, **kwargs):
346 def makefile(self, mode, *args, **kwargs):
346 return self.output_stream if mode == 'wb' else self.input_stream
347 return self.output_stream if mode == 'wb' else self.input_stream
347
348
348
349
349 class MockServer(object):
350 class MockServer(object):
350
351
351 def __init__(self, handler_cls, request):
352 def __init__(self, handler_cls, request):
352 ip_port = ('0.0.0.0', 8888)
353 ip_port = ('0.0.0.0', 8888)
353 self.request = MockRequest(request)
354 self.request = MockRequest(request)
354 self.server_address = ip_port
355 self.server_address = ip_port
355 self.handler = handler_cls(self.request, ip_port, self)
356 self.handler = handler_cls(self.request, ip_port, self)
356
357
357
358
358 @pytest.fixture()
359 @pytest.fixture()
359 def tcp_server():
360 def tcp_server():
360 server = mock.Mock()
361 server = mock.Mock()
361 server.server_address = ('127.0.0.1', 8881)
362 server.server_address = ('127.0.0.1', 8881)
362 server.wbufsize = 1024
363 server.wbufsize = 1024
363 return server
364 return server
@@ -1,978 +1,979 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import mock
20 import mock
21 import pytest
21 import pytest
22 import textwrap
22 import textwrap
23
23
24 import rhodecode
24 import rhodecode
25 from rhodecode.lib.vcs.backends import get_backend
25 from rhodecode.lib.vcs.backends import get_backend
26 from rhodecode.lib.vcs.backends.base import (
26 from rhodecode.lib.vcs.backends.base import (
27 MergeResponse, MergeFailureReason, Reference)
27 MergeResponse, MergeFailureReason, Reference)
28 from rhodecode.lib.vcs.exceptions import RepositoryError
28 from rhodecode.lib.vcs.exceptions import RepositoryError
29 from rhodecode.lib.vcs.nodes import FileNode
29 from rhodecode.lib.vcs.nodes import FileNode
30 from rhodecode.model.comment import CommentsModel
30 from rhodecode.model.comment import CommentsModel
31 from rhodecode.model.db import PullRequest, Session
31 from rhodecode.model.db import PullRequest, Session
32 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.pull_request import PullRequestModel
33 from rhodecode.model.user import UserModel
33 from rhodecode.model.user import UserModel
34 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
34 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
36
36
37 pytestmark = [
37 pytestmark = [
38 pytest.mark.backends("git", "hg"),
38 pytest.mark.backends("git", "hg"),
39 ]
39 ]
40
40
41
41
42 @pytest.mark.usefixtures('config_stub')
42 @pytest.mark.usefixtures('config_stub')
43 class TestPullRequestModel(object):
43 class TestPullRequestModel(object):
44
44
45 @pytest.fixture()
45 @pytest.fixture()
46 def pull_request(self, request, backend, pr_util):
46 def pull_request(self, request, backend, pr_util):
47 """
47 """
48 A pull request combined with multiples patches.
48 A pull request combined with multiples patches.
49 """
49 """
50 BackendClass = get_backend(backend.alias)
50 BackendClass = get_backend(backend.alias)
51 merge_resp = MergeResponse(
51 merge_resp = MergeResponse(
52 False, False, None, MergeFailureReason.UNKNOWN,
52 False, False, None, MergeFailureReason.UNKNOWN,
53 metadata={'exception': 'MockError'})
53 metadata={'exception': 'MockError'})
54 self.merge_patcher = mock.patch.object(
54 self.merge_patcher = mock.patch.object(
55 BackendClass, 'merge', return_value=merge_resp)
55 BackendClass, 'merge', return_value=merge_resp)
56 self.workspace_remove_patcher = mock.patch.object(
56 self.workspace_remove_patcher = mock.patch.object(
57 BackendClass, 'cleanup_merge_workspace')
57 BackendClass, 'cleanup_merge_workspace')
58
58
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
61 self.comment_patcher = mock.patch(
61 self.comment_patcher = mock.patch(
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 self.comment_patcher.start()
63 self.comment_patcher.start()
64 self.notification_patcher = mock.patch(
64 self.notification_patcher = mock.patch(
65 'rhodecode.model.notification.NotificationModel.create')
65 'rhodecode.model.notification.NotificationModel.create')
66 self.notification_patcher.start()
66 self.notification_patcher.start()
67 self.helper_patcher = mock.patch(
67 self.helper_patcher = mock.patch(
68 'rhodecode.lib.helpers.route_path')
68 'rhodecode.lib.helpers.route_path')
69 self.helper_patcher.start()
69 self.helper_patcher.start()
70
70
71 self.hook_patcher = mock.patch.object(PullRequestModel,
71 self.hook_patcher = mock.patch.object(PullRequestModel,
72 'trigger_pull_request_hook')
72 'trigger_pull_request_hook')
73 self.hook_mock = self.hook_patcher.start()
73 self.hook_mock = self.hook_patcher.start()
74
74
75 self.invalidation_patcher = mock.patch(
75 self.invalidation_patcher = mock.patch(
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 self.invalidation_mock = self.invalidation_patcher.start()
77 self.invalidation_mock = self.invalidation_patcher.start()
78
78
79 self.pull_request = pr_util.create_pull_request(
79 self.pull_request = pr_util.create_pull_request(
80 mergeable=True, name_suffix=u'Δ…Δ‡')
80 mergeable=True, name_suffix=u'Δ…Δ‡')
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 self.repo_id = self.pull_request.target_repo.repo_id
84 self.repo_id = self.pull_request.target_repo.repo_id
85
85
86 @request.addfinalizer
86 @request.addfinalizer
87 def cleanup_pull_request():
87 def cleanup_pull_request():
88 calls = [mock.call(
88 calls = [mock.call(
89 self.pull_request, self.pull_request.author, 'create')]
89 self.pull_request, self.pull_request.author, 'create')]
90 self.hook_mock.assert_has_calls(calls)
90 self.hook_mock.assert_has_calls(calls)
91
91
92 self.workspace_remove_patcher.stop()
92 self.workspace_remove_patcher.stop()
93 self.merge_patcher.stop()
93 self.merge_patcher.stop()
94 self.comment_patcher.stop()
94 self.comment_patcher.stop()
95 self.notification_patcher.stop()
95 self.notification_patcher.stop()
96 self.helper_patcher.stop()
96 self.helper_patcher.stop()
97 self.hook_patcher.stop()
97 self.hook_patcher.stop()
98 self.invalidation_patcher.stop()
98 self.invalidation_patcher.stop()
99
99
100 return self.pull_request
100 return self.pull_request
101
101
102 def test_get_all(self, pull_request):
102 def test_get_all(self, pull_request):
103 prs = PullRequestModel().get_all(pull_request.target_repo)
103 prs = PullRequestModel().get_all(pull_request.target_repo)
104 assert isinstance(prs, list)
104 assert isinstance(prs, list)
105 assert len(prs) == 1
105 assert len(prs) == 1
106
106
107 def test_count_all(self, pull_request):
107 def test_count_all(self, pull_request):
108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
109 assert pr_count == 1
109 assert pr_count == 1
110
110
111 def test_get_awaiting_review(self, pull_request):
111 def test_get_awaiting_review(self, pull_request):
112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
113 assert isinstance(prs, list)
113 assert isinstance(prs, list)
114 assert len(prs) == 1
114 assert len(prs) == 1
115
115
116 def test_count_awaiting_review(self, pull_request):
116 def test_count_awaiting_review(self, pull_request):
117 pr_count = PullRequestModel().count_awaiting_review(
117 pr_count = PullRequestModel().count_awaiting_review(
118 pull_request.target_repo)
118 pull_request.target_repo)
119 assert pr_count == 1
119 assert pr_count == 1
120
120
121 def test_get_awaiting_my_review(self, pull_request):
121 def test_get_awaiting_my_review(self, pull_request):
122 PullRequestModel().update_reviewers(
122 PullRequestModel().update_reviewers(
123 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
123 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
124 pull_request.author)
124 pull_request.author)
125 Session().commit()
125 Session().commit()
126
126
127 prs = PullRequestModel().get_awaiting_my_review(
127 prs = PullRequestModel().get_awaiting_my_review(
128 pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
128 pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
129 assert isinstance(prs, list)
129 assert isinstance(prs, list)
130 assert len(prs) == 1
130 assert len(prs) == 1
131
131
132 def test_count_awaiting_my_review(self, pull_request):
132 def test_count_awaiting_my_review(self, pull_request):
133 PullRequestModel().update_reviewers(
133 PullRequestModel().update_reviewers(
134 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
134 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
135 pull_request.author)
135 pull_request.author)
136 Session().commit()
136 Session().commit()
137
137
138 pr_count = PullRequestModel().count_awaiting_my_review(
138 pr_count = PullRequestModel().count_awaiting_my_review(
139 pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
139 pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
140 assert pr_count == 1
140 assert pr_count == 1
141
141
142 def test_delete_calls_cleanup_merge(self, pull_request):
142 def test_delete_calls_cleanup_merge(self, pull_request):
143 repo_id = pull_request.target_repo.repo_id
143 repo_id = pull_request.target_repo.repo_id
144 PullRequestModel().delete(pull_request, pull_request.author)
144 PullRequestModel().delete(pull_request, pull_request.author)
145 Session().commit()
145 Session().commit()
146
146
147 self.workspace_remove_mock.assert_called_once_with(
147 self.workspace_remove_mock.assert_called_once_with(
148 repo_id, self.workspace_id)
148 repo_id, self.workspace_id)
149
149
150 def test_close_calls_cleanup_and_hook(self, pull_request):
150 def test_close_calls_cleanup_and_hook(self, pull_request):
151 PullRequestModel().close_pull_request(
151 PullRequestModel().close_pull_request(
152 pull_request, pull_request.author)
152 pull_request, pull_request.author)
153 Session().commit()
153 Session().commit()
154
154
155 repo_id = pull_request.target_repo.repo_id
155 repo_id = pull_request.target_repo.repo_id
156
156
157 self.workspace_remove_mock.assert_called_once_with(
157 self.workspace_remove_mock.assert_called_once_with(
158 repo_id, self.workspace_id)
158 repo_id, self.workspace_id)
159 self.hook_mock.assert_called_with(
159 self.hook_mock.assert_called_with(
160 self.pull_request, self.pull_request.author, 'close')
160 self.pull_request, self.pull_request.author, 'close')
161
161
162 def test_merge_status(self, pull_request):
162 def test_merge_status(self, pull_request):
163 self.merge_mock.return_value = MergeResponse(
163 self.merge_mock.return_value = MergeResponse(
164 True, False, None, MergeFailureReason.NONE)
164 True, False, None, MergeFailureReason.NONE)
165
165
166 assert pull_request._last_merge_source_rev is None
166 assert pull_request._last_merge_source_rev is None
167 assert pull_request._last_merge_target_rev is None
167 assert pull_request._last_merge_target_rev is None
168 assert pull_request.last_merge_status is None
168 assert pull_request.last_merge_status is None
169
169
170 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
170 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
171 assert status is True
171 assert status is True
172 assert msg == 'This pull request can be automatically merged.'
172 assert msg == 'This pull request can be automatically merged.'
173 self.merge_mock.assert_called_with(
173 self.merge_mock.assert_called_with(
174 self.repo_id, self.workspace_id,
174 self.repo_id, self.workspace_id,
175 pull_request.target_ref_parts,
175 pull_request.target_ref_parts,
176 pull_request.source_repo.scm_instance(),
176 pull_request.source_repo.scm_instance(),
177 pull_request.source_ref_parts, dry_run=True,
177 pull_request.source_ref_parts, dry_run=True,
178 use_rebase=False, close_branch=False)
178 use_rebase=False, close_branch=False)
179
179
180 assert pull_request._last_merge_source_rev == self.source_commit
180 assert pull_request._last_merge_source_rev == self.source_commit
181 assert pull_request._last_merge_target_rev == self.target_commit
181 assert pull_request._last_merge_target_rev == self.target_commit
182 assert pull_request.last_merge_status is MergeFailureReason.NONE
182 assert pull_request.last_merge_status is MergeFailureReason.NONE
183
183
184 self.merge_mock.reset_mock()
184 self.merge_mock.reset_mock()
185 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
185 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
186 assert status is True
186 assert status is True
187 assert msg == 'This pull request can be automatically merged.'
187 assert msg == 'This pull request can be automatically merged.'
188 assert self.merge_mock.called is False
188 assert self.merge_mock.called is False
189
189
190 def test_merge_status_known_failure(self, pull_request):
190 def test_merge_status_known_failure(self, pull_request):
191 self.merge_mock.return_value = MergeResponse(
191 self.merge_mock.return_value = MergeResponse(
192 False, False, None, MergeFailureReason.MERGE_FAILED,
192 False, False, None, MergeFailureReason.MERGE_FAILED,
193 metadata={'unresolved_files': 'file1'})
193 metadata={'unresolved_files': 'file1'})
194
194
195 assert pull_request._last_merge_source_rev is None
195 assert pull_request._last_merge_source_rev is None
196 assert pull_request._last_merge_target_rev is None
196 assert pull_request._last_merge_target_rev is None
197 assert pull_request.last_merge_status is None
197 assert pull_request.last_merge_status is None
198
198
199 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
199 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
200 assert status is False
200 assert status is False
201 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
201 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
202 self.merge_mock.assert_called_with(
202 self.merge_mock.assert_called_with(
203 self.repo_id, self.workspace_id,
203 self.repo_id, self.workspace_id,
204 pull_request.target_ref_parts,
204 pull_request.target_ref_parts,
205 pull_request.source_repo.scm_instance(),
205 pull_request.source_repo.scm_instance(),
206 pull_request.source_ref_parts, dry_run=True,
206 pull_request.source_ref_parts, dry_run=True,
207 use_rebase=False, close_branch=False)
207 use_rebase=False, close_branch=False)
208
208
209 assert pull_request._last_merge_source_rev == self.source_commit
209 assert pull_request._last_merge_source_rev == self.source_commit
210 assert pull_request._last_merge_target_rev == self.target_commit
210 assert pull_request._last_merge_target_rev == self.target_commit
211 assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED
211 assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED
212
212
213 self.merge_mock.reset_mock()
213 self.merge_mock.reset_mock()
214 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
214 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
215 assert status is False
215 assert status is False
216 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
216 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
217 assert self.merge_mock.called is False
217 assert self.merge_mock.called is False
218
218
219 def test_merge_status_unknown_failure(self, pull_request):
219 def test_merge_status_unknown_failure(self, pull_request):
220 self.merge_mock.return_value = MergeResponse(
220 self.merge_mock.return_value = MergeResponse(
221 False, False, None, MergeFailureReason.UNKNOWN,
221 False, False, None, MergeFailureReason.UNKNOWN,
222 metadata={'exception': 'MockError'})
222 metadata={'exception': 'MockError'})
223
223
224 assert pull_request._last_merge_source_rev is None
224 assert pull_request._last_merge_source_rev is None
225 assert pull_request._last_merge_target_rev is None
225 assert pull_request._last_merge_target_rev is None
226 assert pull_request.last_merge_status is None
226 assert pull_request.last_merge_status is None
227
227
228 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
228 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
229 assert status is False
229 assert status is False
230 assert msg == (
230 assert msg == (
231 'This pull request cannot be merged because of an unhandled exception. '
231 'This pull request cannot be merged because of an unhandled exception. '
232 'MockError')
232 'MockError')
233 self.merge_mock.assert_called_with(
233 self.merge_mock.assert_called_with(
234 self.repo_id, self.workspace_id,
234 self.repo_id, self.workspace_id,
235 pull_request.target_ref_parts,
235 pull_request.target_ref_parts,
236 pull_request.source_repo.scm_instance(),
236 pull_request.source_repo.scm_instance(),
237 pull_request.source_ref_parts, dry_run=True,
237 pull_request.source_ref_parts, dry_run=True,
238 use_rebase=False, close_branch=False)
238 use_rebase=False, close_branch=False)
239
239
240 assert pull_request._last_merge_source_rev is None
240 assert pull_request._last_merge_source_rev is None
241 assert pull_request._last_merge_target_rev is None
241 assert pull_request._last_merge_target_rev is None
242 assert pull_request.last_merge_status is None
242 assert pull_request.last_merge_status is None
243
243
244 self.merge_mock.reset_mock()
244 self.merge_mock.reset_mock()
245 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
245 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
246 assert status is False
246 assert status is False
247 assert msg == (
247 assert msg == (
248 'This pull request cannot be merged because of an unhandled exception. '
248 'This pull request cannot be merged because of an unhandled exception. '
249 'MockError')
249 'MockError')
250 assert self.merge_mock.called is True
250 assert self.merge_mock.called is True
251
251
252 def test_merge_status_when_target_is_locked(self, pull_request):
252 def test_merge_status_when_target_is_locked(self, pull_request):
253 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
253 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
254 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
254 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
255 assert status is False
255 assert status is False
256 assert msg == (
256 assert msg == (
257 'This pull request cannot be merged because the target repository '
257 'This pull request cannot be merged because the target repository '
258 'is locked by user:1.')
258 'is locked by user:1.')
259
259
260 def test_merge_status_requirements_check_target(self, pull_request):
260 def test_merge_status_requirements_check_target(self, pull_request):
261
261
262 def has_largefiles(self, repo):
262 def has_largefiles(self, repo):
263 return repo == pull_request.source_repo
263 return repo == pull_request.source_repo
264
264
265 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
265 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
266 with patcher:
266 with patcher:
267 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
267 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
268
268
269 assert status is False
269 assert status is False
270 assert msg == 'Target repository large files support is disabled.'
270 assert msg == 'Target repository large files support is disabled.'
271
271
272 def test_merge_status_requirements_check_source(self, pull_request):
272 def test_merge_status_requirements_check_source(self, pull_request):
273
273
274 def has_largefiles(self, repo):
274 def has_largefiles(self, repo):
275 return repo == pull_request.target_repo
275 return repo == pull_request.target_repo
276
276
277 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
277 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
278 with patcher:
278 with patcher:
279 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
279 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
280
280
281 assert status is False
281 assert status is False
282 assert msg == 'Source repository large files support is disabled.'
282 assert msg == 'Source repository large files support is disabled.'
283
283
284 def test_merge(self, pull_request, merge_extras):
284 def test_merge(self, pull_request, merge_extras):
285 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
285 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
286 merge_ref = Reference(
286 merge_ref = Reference(
287 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
287 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
288 self.merge_mock.return_value = MergeResponse(
288 self.merge_mock.return_value = MergeResponse(
289 True, True, merge_ref, MergeFailureReason.NONE)
289 True, True, merge_ref, MergeFailureReason.NONE)
290
290
291 merge_extras['repository'] = pull_request.target_repo.repo_name
291 merge_extras['repository'] = pull_request.target_repo.repo_name
292 PullRequestModel().merge_repo(
292 PullRequestModel().merge_repo(
293 pull_request, pull_request.author, extras=merge_extras)
293 pull_request, pull_request.author, extras=merge_extras)
294 Session().commit()
294 Session().commit()
295
295
296 message = (
296 message = (
297 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
297 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
298 u'\n\n {pr_title}'.format(
298 u'\n\n {pr_title}'.format(
299 pr_id=pull_request.pull_request_id,
299 pr_id=pull_request.pull_request_id,
300 source_repo=safe_str(
300 source_repo=safe_str(
301 pull_request.source_repo.scm_instance().name),
301 pull_request.source_repo.scm_instance().name),
302 source_ref_name=pull_request.source_ref_parts.name,
302 source_ref_name=pull_request.source_ref_parts.name,
303 pr_title=safe_str(pull_request.title)
303 pr_title=safe_str(pull_request.title)
304 )
304 )
305 )
305 )
306 self.merge_mock.assert_called_with(
306 self.merge_mock.assert_called_with(
307 self.repo_id, self.workspace_id,
307 self.repo_id, self.workspace_id,
308 pull_request.target_ref_parts,
308 pull_request.target_ref_parts,
309 pull_request.source_repo.scm_instance(),
309 pull_request.source_repo.scm_instance(),
310 pull_request.source_ref_parts,
310 pull_request.source_ref_parts,
311 user_name=user.short_contact, user_email=user.email, message=message,
311 user_name=user.short_contact, user_email=user.email, message=message,
312 use_rebase=False, close_branch=False
312 use_rebase=False, close_branch=False
313 )
313 )
314 self.invalidation_mock.assert_called_once_with(
314 self.invalidation_mock.assert_called_once_with(
315 pull_request.target_repo.repo_name)
315 pull_request.target_repo.repo_name)
316
316
317 self.hook_mock.assert_called_with(
317 self.hook_mock.assert_called_with(
318 self.pull_request, self.pull_request.author, 'merge')
318 self.pull_request, self.pull_request.author, 'merge')
319
319
320 pull_request = PullRequest.get(pull_request.pull_request_id)
320 pull_request = PullRequest.get(pull_request.pull_request_id)
321 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
321 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
322
322
323 def test_merge_with_status_lock(self, pull_request, merge_extras):
323 def test_merge_with_status_lock(self, pull_request, merge_extras):
324 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
324 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
325 merge_ref = Reference(
325 merge_ref = Reference(
326 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
326 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
327 self.merge_mock.return_value = MergeResponse(
327 self.merge_mock.return_value = MergeResponse(
328 True, True, merge_ref, MergeFailureReason.NONE)
328 True, True, merge_ref, MergeFailureReason.NONE)
329
329
330 merge_extras['repository'] = pull_request.target_repo.repo_name
330 merge_extras['repository'] = pull_request.target_repo.repo_name
331
331
332 with pull_request.set_state(PullRequest.STATE_UPDATING):
332 with pull_request.set_state(PullRequest.STATE_UPDATING):
333 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
333 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
334 PullRequestModel().merge_repo(
334 PullRequestModel().merge_repo(
335 pull_request, pull_request.author, extras=merge_extras)
335 pull_request, pull_request.author, extras=merge_extras)
336 Session().commit()
336 Session().commit()
337
337
338 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
338 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
339
339
340 message = (
340 message = (
341 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
341 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
342 u'\n\n {pr_title}'.format(
342 u'\n\n {pr_title}'.format(
343 pr_id=pull_request.pull_request_id,
343 pr_id=pull_request.pull_request_id,
344 source_repo=safe_str(
344 source_repo=safe_str(
345 pull_request.source_repo.scm_instance().name),
345 pull_request.source_repo.scm_instance().name),
346 source_ref_name=pull_request.source_ref_parts.name,
346 source_ref_name=pull_request.source_ref_parts.name,
347 pr_title=safe_str(pull_request.title)
347 pr_title=safe_str(pull_request.title)
348 )
348 )
349 )
349 )
350 self.merge_mock.assert_called_with(
350 self.merge_mock.assert_called_with(
351 self.repo_id, self.workspace_id,
351 self.repo_id, self.workspace_id,
352 pull_request.target_ref_parts,
352 pull_request.target_ref_parts,
353 pull_request.source_repo.scm_instance(),
353 pull_request.source_repo.scm_instance(),
354 pull_request.source_ref_parts,
354 pull_request.source_ref_parts,
355 user_name=user.short_contact, user_email=user.email, message=message,
355 user_name=user.short_contact, user_email=user.email, message=message,
356 use_rebase=False, close_branch=False
356 use_rebase=False, close_branch=False
357 )
357 )
358 self.invalidation_mock.assert_called_once_with(
358 self.invalidation_mock.assert_called_once_with(
359 pull_request.target_repo.repo_name)
359 pull_request.target_repo.repo_name)
360
360
361 self.hook_mock.assert_called_with(
361 self.hook_mock.assert_called_with(
362 self.pull_request, self.pull_request.author, 'merge')
362 self.pull_request, self.pull_request.author, 'merge')
363
363
364 pull_request = PullRequest.get(pull_request.pull_request_id)
364 pull_request = PullRequest.get(pull_request.pull_request_id)
365 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
365 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
366
366
367 def test_merge_failed(self, pull_request, merge_extras):
367 def test_merge_failed(self, pull_request, merge_extras):
368 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
368 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
369 merge_ref = Reference(
369 merge_ref = Reference(
370 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
370 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
371 self.merge_mock.return_value = MergeResponse(
371 self.merge_mock.return_value = MergeResponse(
372 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
372 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
373
373
374 merge_extras['repository'] = pull_request.target_repo.repo_name
374 merge_extras['repository'] = pull_request.target_repo.repo_name
375 PullRequestModel().merge_repo(
375 PullRequestModel().merge_repo(
376 pull_request, pull_request.author, extras=merge_extras)
376 pull_request, pull_request.author, extras=merge_extras)
377 Session().commit()
377 Session().commit()
378
378
379 message = (
379 message = (
380 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
380 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
381 u'\n\n {pr_title}'.format(
381 u'\n\n {pr_title}'.format(
382 pr_id=pull_request.pull_request_id,
382 pr_id=pull_request.pull_request_id,
383 source_repo=safe_str(
383 source_repo=safe_str(
384 pull_request.source_repo.scm_instance().name),
384 pull_request.source_repo.scm_instance().name),
385 source_ref_name=pull_request.source_ref_parts.name,
385 source_ref_name=pull_request.source_ref_parts.name,
386 pr_title=safe_str(pull_request.title)
386 pr_title=safe_str(pull_request.title)
387 )
387 )
388 )
388 )
389 self.merge_mock.assert_called_with(
389 self.merge_mock.assert_called_with(
390 self.repo_id, self.workspace_id,
390 self.repo_id, self.workspace_id,
391 pull_request.target_ref_parts,
391 pull_request.target_ref_parts,
392 pull_request.source_repo.scm_instance(),
392 pull_request.source_repo.scm_instance(),
393 pull_request.source_ref_parts,
393 pull_request.source_ref_parts,
394 user_name=user.short_contact, user_email=user.email, message=message,
394 user_name=user.short_contact, user_email=user.email, message=message,
395 use_rebase=False, close_branch=False
395 use_rebase=False, close_branch=False
396 )
396 )
397
397
398 pull_request = PullRequest.get(pull_request.pull_request_id)
398 pull_request = PullRequest.get(pull_request.pull_request_id)
399 assert self.invalidation_mock.called is False
399 assert self.invalidation_mock.called is False
400 assert pull_request.merge_rev is None
400 assert pull_request.merge_rev is None
401
401
402 def test_get_commit_ids(self, pull_request):
402 def test_get_commit_ids(self, pull_request):
403 # The PR has been not merged yet, so expect an exception
403 # The PR has been not merged yet, so expect an exception
404 with pytest.raises(ValueError):
404 with pytest.raises(ValueError):
405 PullRequestModel()._get_commit_ids(pull_request)
405 PullRequestModel()._get_commit_ids(pull_request)
406
406
407 # Merge revision is in the revisions list
407 # Merge revision is in the revisions list
408 pull_request.merge_rev = pull_request.revisions[0]
408 pull_request.merge_rev = pull_request.revisions[0]
409 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
409 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
410 assert commit_ids == pull_request.revisions
410 assert commit_ids == pull_request.revisions
411
411
412 # Merge revision is not in the revisions list
412 # Merge revision is not in the revisions list
413 pull_request.merge_rev = 'f000' * 10
413 pull_request.merge_rev = 'f000' * 10
414 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
414 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
415 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
415 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
416
416
417 def test_get_diff_from_pr_version(self, pull_request):
417 def test_get_diff_from_pr_version(self, pull_request):
418 source_repo = pull_request.source_repo
418 source_repo = pull_request.source_repo
419 source_ref_id = pull_request.source_ref_parts.commit_id
419 source_ref_id = pull_request.source_ref_parts.commit_id
420 target_ref_id = pull_request.target_ref_parts.commit_id
420 target_ref_id = pull_request.target_ref_parts.commit_id
421 diff = PullRequestModel()._get_diff_from_pr_or_version(
421 diff = PullRequestModel()._get_diff_from_pr_or_version(
422 source_repo, source_ref_id, target_ref_id,
422 source_repo, source_ref_id, target_ref_id,
423 hide_whitespace_changes=False, diff_context=6)
423 hide_whitespace_changes=False, diff_context=6)
424 assert b'file_1' in diff.raw.tobytes()
424 assert b'file_1' in diff.raw.tobytes()
425
425
426 def test_generate_title_returns_unicode(self):
426 def test_generate_title_returns_unicode(self):
427 title = PullRequestModel().generate_pullrequest_title(
427 title = PullRequestModel().generate_pullrequest_title(
428 source='source-dummy',
428 source='source-dummy',
429 source_ref='source-ref-dummy',
429 source_ref='source-ref-dummy',
430 target='target-dummy',
430 target='target-dummy',
431 )
431 )
432 assert type(title) == str
432 assert type(title) == str
433
433
434 @pytest.mark.parametrize('title, has_wip', [
434 @pytest.mark.parametrize('title, has_wip', [
435 ('hello', False),
435 ('hello', False),
436 ('hello wip', False),
436 ('hello wip', False),
437 ('hello wip: xxx', False),
437 ('hello wip: xxx', False),
438 ('[wip] hello', True),
438 ('[wip] hello', True),
439 ('[wip] hello', True),
439 ('[wip] hello', True),
440 ('wip: hello', True),
440 ('wip: hello', True),
441 ('wip hello', True),
441 ('wip hello', True),
442
442
443 ])
443 ])
444 def test_wip_title_marker(self, pull_request, title, has_wip):
444 def test_wip_title_marker(self, pull_request, title, has_wip):
445 pull_request.title = title
445 pull_request.title = title
446 assert pull_request.work_in_progress == has_wip
446 assert pull_request.work_in_progress == has_wip
447
447
448
448
@pytest.mark.usefixtures('config_stub')
class TestIntegrationMerge(object):
    """Integration tests: merge_repo with real hook dispatch (celery protocol)."""

    @pytest.mark.parametrize('extra_config', (
        {'vcs.hooks.protocol.v2': 'celery', 'vcs.hooks.direct_calls': False},
    ))
    def test_merge_triggers_push_hooks(
            self, pr_util, user_admin, capture_rcextensions, merge_extras,
            extra_config):
        """A successful merge runs both the pre-push and push rcextension hooks."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
            merge_state = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert merge_state.executed
        assert '_pre_push_hook' in capture_rcextensions
        assert '_push_hook' in capture_rcextensions

    def test_merge_can_be_rejected_by_pre_push_hook(
            self, pr_util, user_admin, capture_rcextensions, merge_extras):
        """A pre-push hook raising RepositoryError aborts the merge."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
            pre_pull.side_effect = RepositoryError("Disallow push!")
            merge_status = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert not merge_status.executed
        assert 'pre_push' not in capture_rcextensions
        assert 'post_push' not in capture_rcextensions

    def test_merge_fails_if_target_is_locked(
            self, pr_util, user_regular, merge_extras):
        """A target repo locked by another user prevents the merge."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # Lock as a different user than the one merging.
        locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
        pull_request.target_repo.locked = locked_by
        # TODO: johbo: Check if this can work based on the database, currently
        # all data is pre-computed, that's why just updating the DB is not
        # enough.
        merge_extras['locked_by'] = locked_by
        merge_extras['repository'] = pull_request.target_repo.repo_name
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        Session().commit()
        merge_status = PullRequestModel().merge_repo(
            pull_request, user_regular, extras=merge_extras)
        Session().commit()

        assert not merge_status.executed
509
509
510
510
@pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
    (False, 1, 0),
    (True, 0, 1),
])
def test_outdated_comments(
        pr_util, use_outdated, inlines_count, outdated_count, config_stub):
    """Inline comments on a stale diff are flagged outdated when the feature is on."""
    pull_request = pr_util.create_pull_request()
    pr_util.create_inline_comment(file_path='not_in_updated_diff')

    with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
        pr_util.add_one_commit()
        assert_inline_comments(
            pull_request, visible=inlines_count, outdated=outdated_count)
    outdated_comment_mock.assert_called_with(pull_request)
525
525
526
526
@pytest.mark.parametrize('mr_type, expected_msg', [
    (MergeFailureReason.NONE,
     'This pull request can be automatically merged.'),
    (MergeFailureReason.UNKNOWN,
     'This pull request cannot be merged because of an unhandled exception. CRASH'),
    (MergeFailureReason.MERGE_FAILED,
     'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
    (MergeFailureReason.PUSH_FAILED,
     'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
    (MergeFailureReason.TARGET_IS_NOT_HEAD,
     'This pull request cannot be merged because the target `ref_name` is not a head.'),
    (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
     'This pull request cannot be merged because the source contains more branches than the target.'),
    (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
     'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
    (MergeFailureReason.TARGET_IS_LOCKED,
     'This pull request cannot be merged because the target repository is locked by user:123.'),
    (MergeFailureReason.MISSING_TARGET_REF,
     'This pull request cannot be merged because the target reference `ref_name` is missing.'),
    (MergeFailureReason.MISSING_SOURCE_REF,
     'This pull request cannot be merged because the source reference `ref_name` is missing.'),
    (MergeFailureReason.SUBREPO_MERGE_FAILED,
     'This pull request cannot be merged because of conflicts related to sub repositories.'),
])
def test_merge_response_message(mr_type, expected_msg):
    """Each failure reason renders its user-facing status message from metadata."""
    merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
    # Superset of metadata; each message template picks the keys it needs.
    metadata = {
        'unresolved_files': 'CONFLICT_FILE',
        'exception': "CRASH",
        'target': 'some-repo',
        'merge_commit': 'merge_commit',
        'target_ref': merge_ref,
        'source_ref': merge_ref,
        'heads': ','.join(['a', 'b', 'c']),
        'locked_by': 'user:123'
    }

    merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
    assert merge_response.merge_status_message == expected_msg
567
567
568
568
@pytest.fixture()
def merge_extras(request, user_regular):
    """
    Context for the vcs operation when running a merge.

    Uses the real ``pyramid_config`` ini value from the pytest configuration
    (via the built-in ``request`` fixture) so the celery-based hook daemon
    receives a valid config path instead of a fake placeholder.
    """
    extras = {
        'ip': '127.0.0.1',
        'username': user_regular.username,
        'user_id': user_regular.user_id,
        'action': 'push',
        'repository': 'fake_target_repo_name',
        'scm': 'git',
        'config': request.config.getini('pyramid_config'),
        'repo_store': '',
        'make_lock': None,
        'locked_by': [None, None, None],
        'server_url': 'http://test.example.com:5000',
        'hooks': ['push', 'pull'],
        'is_shadow_repo': False,
    }
    return extras
590
591
591
592
@pytest.mark.usefixtures('config_stub')
class TestUpdateCommentHandling(object):
    """
    Checks how inline comments are kept or flagged as outdated when a
    pull request is updated with new commits.
    """

    @pytest.fixture(autouse=True, scope='class')
    def enable_outdated_comments(self, request, baseapp):
        # Force the "outdated comments" feature on for the whole class.
        config_patch = mock.patch.dict(
            'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
        config_patch.start()

        @request.addfinalizer
        def cleanup():
            config_patch.stop()

    def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
        """An update that does not touch the commented file keeps the comment."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
            {'message': 'c', 'added': [FileNode(b'file_c', b'test_content\n')]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    def test_comment_stays_unflagged_on_change_above(self, pr_util):
        """A change above the commented line keeps the comment and shifts its line."""
        original_content = b''.join((b'line %d\n' % x for x in range(1, 11)))
        updated_content = b'new_line_at_top\n' + original_content
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])

        with outdated_comments_patcher():
            comment = pr_util.create_inline_comment(
                line_no=u'n8', file_path='file_b')
            pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)
        # The inserted top line pushes the comment one line down.
        assert comment.line_no == u'n9'

    def test_comment_stays_unflagged_on_change_below(self, pr_util):
        """A change below the commented line keeps the comment untouched."""
        original_content = b''.join([b'line %d\n' % x for x in range(10)])
        updated_content = original_content + b'new_line_at_end\n'
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
    def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
        """A change within the comment's context window flags it as outdated."""
        base_lines = [b'line %d\n' % x for x in range(1, 13)]
        change_lines = list(base_lines)
        change_lines.insert(6, b'line 6a added\n')

        # Changes on the last line of sight
        update_lines = list(change_lines)
        update_lines[0] = b'line 1 changed\n'
        update_lines[-1] = b'line 12 changed\n'

        def file_b(lines):
            return FileNode(b'file_b', b''.join(lines))

        commits = [
            {'message': 'a', 'added': [file_b(base_lines)]},
            {'message': 'b', 'changed': [file_b(change_lines)]},
            {'message': 'c', 'changed': [file_b(update_lines)]},
        ]

        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(line_no=line_no, file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
        assert_inline_comments(pull_request, visible=0, outdated=1)

    # NOTE: parametrize ``ids`` must be strings; the original passed
    # ``b'removed'`` (bytes) which pytest rejects/renders incorrectly.
    @pytest.mark.parametrize("change, content", [
        ('changed', b'changed\n'),
        ('removed', b''),
    ], ids=['changed', 'removed'])
    def test_comment_flagged_on_change(self, pr_util, change, content):
        """Changing or removing the commented file flags the comment outdated."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
            {'message': 'c', change: [FileNode(b'file_b', content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
        assert_inline_comments(pull_request, visible=0, outdated=1)
697
698
698
699
@pytest.mark.usefixtures('config_stub')
class TestUpdateChangedFiles(object):
    """
    Verifies the added/modified/removed file summary computed when a
    pull request is updated to a new head.
    """

    def test_no_changes_on_unchanged_diff(self, pr_util):
        """A new commit adding an unrelated file reports only that addition."""
        history = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode(b'file_b', b'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode(b'file_c', b'test_content c\n')]},
        ]
        # PR from a to b, which adds file_b
        pull_request = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b',
            revisions=['b'], name_suffix='per-file-review')

        # update the PR so it also brings in file_c
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request, added=['file_c'], modified=[], removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        """Reverting a modification in a later head reports no changes again."""
        history = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode(b'file_b', b'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode(b'file_b', b'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode(b'file_b', b'test_content b\n')]},
        ]
        # PR from a to b, which adds file_b
        pull_request = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b',
            revisions=['b'], name_suffix='per-file-review')

        # move to c: file_b is modified
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request, added=[], modified=['file_b'], removed=[])

        # move to d: the modification is rolled back, so nothing changed
        pr_util.add_one_commit(head='d')

        assert_pr_file_changes(
            pull_request, added=[], modified=[], removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        """Touching every file in the PR reports all of them as modified."""
        history = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode(b'file_a', b'test_content a\n'),
                FileNode(b'file_b', b'test_content b\n'),
                FileNode(b'file_c', b'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode(b'file_a', b'test_content a changed\n'),
                FileNode(b'file_b', b'test_content b changed\n'),
                FileNode(b'file_c', b'test_content c changed\n')]},
        ]
        # PR from a to b, touching three files
        pull_request = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b',
            revisions=['b'], name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_a', 'file_b', 'file_c'],
            removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        """Deleting every file in the PR reports all of them as removed."""
        history = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode(b'file_a', b'test_content a\n'),
                FileNode(b'file_b', b'test_content b\n'),
                FileNode(b'file_c', b'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode(b'file_a', b'test_content a changed\n'),
                FileNode(b'file_b', b'test_content b changed\n'),
                FileNode(b'file_c', b'test_content c changed\n')]},
        ]
        # PR from a to b, which removes three files
        pull_request = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b',
            revisions=['b'], name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=['file_a', 'file_b', 'file_c'])
807
808
808
809
def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    """Updating the commits of a changed PR records exactly one version."""
    pull_request = pr_util.create_pull_request()
    model = PullRequestModel()
    pr_util.update_source_repository()

    model.update_commits(pull_request, pull_request.author)

    # the update of a changed source must have produced a version entry
    assert len(model.get_versions(pull_request)) == 1
818
819
819
820
def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    """An update with no new commits must not create a version entry."""
    pull_request = pr_util.create_pull_request()
    model = PullRequestModel()

    model.update_commits(pull_request, pull_request.author)

    # nothing changed, so no version snapshot is expected
    assert len(model.get_versions(pull_request)) == 0
827
828
828
829
def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    """Existing comments get linked to the version created by an update."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()

    model.update_commits(pull_request, pull_request.author)

    # the pre-existing comment must now hang off the created version
    first_version = model.get_versions(pull_request)[0]
    assert comment.pull_request_version == first_version
839
840
840
841
def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    """Updating a PR posts a status comment describing the changed commits/files."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    response = model.update_commits(pull_request, pull_request.author)
    ancestor_id = response.common_ancestor_id

    # the newest comment should describe the update
    expected_message = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c-{}-92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    ).format(ancestor_id[:12])
    comments_by_age = sorted(pull_request.comments, key=lambda c: c.modified_at)
    assert comments_by_age[-1].text == expected_message
872
873
873
874
def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    """A version snapshot copies PR attributes and refreshes created_on."""
    pull_request = pr_util.create_pull_request()

    # push the PR away from its default values so copying is observable
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request._last_merge_source_rev = "0" * 40
    pull_request._last_merge_target_rev = "1" * 40
    pull_request.last_merge_status = 1
    pull_request.merge_rev = "2" * 40

    # keep the automatically maintained timestamps for later comparison
    original_created_on = pull_request.created_on
    original_updated_on = pull_request.updated_on

    version = PullRequestModel()._create_version_from_snapshot(pull_request)

    # copied attributes
    assert version.title == pr_util.create_parameters['title']
    assert version.description == pr_util.create_parameters['description']
    assert version.status == PullRequest.STATUS_CLOSED

    # a version gets a fresh created_on, but keeps updated_on
    assert version.created_on != original_created_on
    assert version.updated_on == original_updated_on

    assert version.user_id == pull_request.user_id
    assert version.revisions == pr_util.create_parameters['revisions']
    assert version.source_repo == pr_util.source_repository
    assert version.source_ref == pr_util.create_parameters['source_ref']
    assert version.target_repo == pr_util.target_repository
    assert version.target_ref == pr_util.create_parameters['target_ref']
    assert version._last_merge_source_rev == pull_request._last_merge_source_rev
    assert version._last_merge_target_rev == pull_request._last_merge_target_rev
    assert version.last_merge_status == pull_request.last_merge_status
    assert version.merge_rev == pull_request.merge_rev
    assert version.pull_request == pull_request
911
912
912
913
def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    """Linking a version claims only comments not yet tied to a version."""
    first_version = pr_util.create_version_of_pull_request()
    linked = pr_util.create_comment(linked_to=first_version)
    unlinked = pr_util.create_comment()
    second_version = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(second_version)
    Session().commit()

    # the fresh comment moves to the new version ...
    assert unlinked.pull_request_version_id == second_version.pull_request_version_id
    # ... while the already-linked comment stays where it was
    assert linked.pull_request_version_id == first_version.pull_request_version_id
    assert unlinked.pull_request_version_id != linked.pull_request_version_id
932
933
933
934
def test_calculate_commits():
    """Commit-id diffing splits ids into added/common/removed/total."""
    previous_ids = [1, 2, 3]
    current_ids = [1, 3, 4, 5]

    change = PullRequestModel()._calculate_commit_id_changes(
        previous_ids, current_ids)

    assert change.added == [4, 5]
    assert change.common == [1, 3]
    assert change.removed == [2]
    assert change.total == [1, 3, 4, 5]
942
943
943
944
def assert_inline_comments(pull_request, visible=None, outdated=None):
    """Assert the count of visible and/or outdated inline comments on a PR.

    Either check is skipped when its expected count is ``None``.
    """
    repo_id = pull_request.target_repo.repo_id
    if visible is not None:
        inline = CommentsModel().get_inline_comments(
            repo_id, pull_request=pull_request)
        flat = CommentsModel().get_inline_comments_as_list(inline)
        assert len(flat) == visible
    if outdated is not None:
        stale = CommentsModel().get_outdated_comments(repo_id, pull_request)
        assert len(stale) == outdated
955
956
956
957
def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
    """Assert the added/modified/removed file lists computed for a PR update.

    Compares the current pull request state against its FIRST version
    (the original PR), mirroring how the update summary is produced.
    """
    pr_versions = PullRequestModel().get_versions(pull_request)
    # always use first version, ie original PR to calculate changes
    pull_request_version = pr_versions[0]
    old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
        pull_request, pull_request_version)
    file_changes = PullRequestModel()._calculate_file_changes(
        old_diff_data, new_diff_data)

    # f-strings instead of %-formatting for the failure messages
    assert added == file_changes.added, \
        f'expected added:{added} vs value:{file_changes.added}'
    assert modified == file_changes.modified, \
        f'expected modified:{modified} vs value:{file_changes.modified}'
    assert removed == file_changes.removed, \
        f'expected removed:{removed} vs value:{file_changes.removed}'
973
974
974
975
def outdated_comments_patcher(use_outdated=True):
    """Return a patcher that pins ``CommentsModel.use_outdated_comments``."""
    return mock.patch.object(
        CommentsModel,
        'use_outdated_comments',
        return_value=use_outdated,
    )
General Comments 0
You need to be logged in to leave comments. Login now