tests: fixed some deprecated pytest calls and warnings.
dan -
r3098:97c1a8b7 default
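The pattern applied throughout this change is to stop calling pytest fixture functions directly, which newer pytest versions deprecate, and instead keep each fixture as a thin wrapper around a plain `plain_*` helper that ordinary code can call. A minimal sketch of that shape, with names mirroring the conftest below (the real helpers return `AttributeDict` objects; a plain dict is used here for brevity):

```python
# Sketch only: illustrates the wrapper pattern used in this commit, not the exact RhodeCode code.
import pytest


def plain_dummy_user():
    # Plain helper: safe to call from module level, class bodies, or other helpers.
    return {'username': 'test_user'}


@pytest.fixture
def dummy_user():
    # Fixture wrapper: requested by tests via their argument list, never called directly.
    return plain_dummy_user()
```

Tests keep requesting `dummy_user` as a fixture argument, while non-test helper code imports and calls `plain_dummy_user()` directly.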
@@ -1,203 +1,203 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.model.repo import RepoModel
24 from rhodecode.model.repo import RepoModel
25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
26 from rhodecode.api.tests.utils import (
26 from rhodecode.api.tests.utils import (
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 from rhodecode.tests.fixture import Fixture
28 from rhodecode.tests.fixture import Fixture
29 from rhodecode.tests.plugin import http_host_stub, http_host_only_stub
29 from rhodecode.tests.plugin import plain_http_host_only_stub
30
30
31 fixture = Fixture()
31 fixture = Fixture()
32
32
33 UPDATE_REPO_NAME = 'api_update_me'
33 UPDATE_REPO_NAME = 'api_update_me'
34
34
35
35
36 class SAME_AS_UPDATES(object):
36 class SAME_AS_UPDATES(object):
37 """ Constant used for tests below """
37 """ Constant used for tests below """
38
38
39
39
40 @pytest.mark.usefixtures("testuser_api", "app")
40 @pytest.mark.usefixtures("testuser_api", "app")
41 class TestApiUpdateRepo(object):
41 class TestApiUpdateRepo(object):
42
42
43 @pytest.mark.parametrize("updates, expected", [
43 @pytest.mark.parametrize("updates, expected", [
44 ({'owner': TEST_USER_REGULAR_LOGIN},
44 ({'owner': TEST_USER_REGULAR_LOGIN},
45 SAME_AS_UPDATES),
45 SAME_AS_UPDATES),
46
46
47 ({'description': 'new description'},
47 ({'description': 'new description'},
48 SAME_AS_UPDATES),
48 SAME_AS_UPDATES),
49
49
50 ({'clone_uri': 'http://foo.com/repo'},
50 ({'clone_uri': 'http://foo.com/repo'},
51 SAME_AS_UPDATES),
51 SAME_AS_UPDATES),
52
52
53 ({'clone_uri': None},
53 ({'clone_uri': None},
54 {'clone_uri': ''}),
54 {'clone_uri': ''}),
55
55
56 ({'clone_uri': ''},
56 ({'clone_uri': ''},
57 {'clone_uri': ''}),
57 {'clone_uri': ''}),
58
58
59 ({'clone_uri': 'http://example.com/repo_pull'},
59 ({'clone_uri': 'http://example.com/repo_pull'},
60 {'clone_uri': 'http://example.com/repo_pull'}),
60 {'clone_uri': 'http://example.com/repo_pull'}),
61
61
62 ({'push_uri': ''},
62 ({'push_uri': ''},
63 {'push_uri': ''}),
63 {'push_uri': ''}),
64
64
65 ({'push_uri': 'http://example.com/repo_push'},
65 ({'push_uri': 'http://example.com/repo_push'},
66 {'push_uri': 'http://example.com/repo_push'}),
66 {'push_uri': 'http://example.com/repo_push'}),
67
67
68 ({'landing_rev': 'rev:tip'},
68 ({'landing_rev': 'rev:tip'},
69 {'landing_rev': ['rev', 'tip']}),
69 {'landing_rev': ['rev', 'tip']}),
70
70
71 ({'enable_statistics': True},
71 ({'enable_statistics': True},
72 SAME_AS_UPDATES),
72 SAME_AS_UPDATES),
73
73
74 ({'enable_locking': True},
74 ({'enable_locking': True},
75 SAME_AS_UPDATES),
75 SAME_AS_UPDATES),
76
76
77 ({'enable_downloads': True},
77 ({'enable_downloads': True},
78 SAME_AS_UPDATES),
78 SAME_AS_UPDATES),
79
79
80 ({'repo_name': 'new_repo_name'},
80 ({'repo_name': 'new_repo_name'},
81 {
81 {
82 'repo_name': 'new_repo_name',
82 'repo_name': 'new_repo_name',
83 'url': 'http://{}/new_repo_name'.format(http_host_only_stub())
83 'url': 'http://{}/new_repo_name'.format(plain_http_host_only_stub())
84 }),
84 }),
85
85
86 ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
86 ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
87 '_group': 'test_group_for_update'},
87 '_group': 'test_group_for_update'},
88 {
88 {
89 'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
89 'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
90 'url': 'http://{}/test_group_for_update/{}'.format(
90 'url': 'http://{}/test_group_for_update/{}'.format(
91 http_host_only_stub(), UPDATE_REPO_NAME)
91 plain_http_host_only_stub(), UPDATE_REPO_NAME)
92 }),
92 }),
93 ])
93 ])
94 def test_api_update_repo(self, updates, expected, backend):
94 def test_api_update_repo(self, updates, expected, backend):
95 repo_name = UPDATE_REPO_NAME
95 repo_name = UPDATE_REPO_NAME
96 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
96 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
97 if updates.get('_group'):
97 if updates.get('_group'):
98 fixture.create_repo_group(updates['_group'])
98 fixture.create_repo_group(updates['_group'])
99
99
100 expected_api_data = repo.get_api_data(include_secrets=True)
100 expected_api_data = repo.get_api_data(include_secrets=True)
101 if expected is SAME_AS_UPDATES:
101 if expected is SAME_AS_UPDATES:
102 expected_api_data.update(updates)
102 expected_api_data.update(updates)
103 else:
103 else:
104 expected_api_data.update(expected)
104 expected_api_data.update(expected)
105
105
106 id_, params = build_data(
106 id_, params = build_data(
107 self.apikey, 'update_repo', repoid=repo_name, **updates)
107 self.apikey, 'update_repo', repoid=repo_name, **updates)
108
108
109 with mock.patch('rhodecode.model.validation_schema.validators.url_validator'):
109 with mock.patch('rhodecode.model.validation_schema.validators.url_validator'):
110 response = api_call(self.app, params)
110 response = api_call(self.app, params)
111
111
112 if updates.get('repo_name'):
112 if updates.get('repo_name'):
113 repo_name = updates['repo_name']
113 repo_name = updates['repo_name']
114
114
115 try:
115 try:
116 expected = {
116 expected = {
117 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
117 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
118 'repository': jsonify(expected_api_data)
118 'repository': jsonify(expected_api_data)
119 }
119 }
120 assert_ok(id_, expected, given=response.body)
120 assert_ok(id_, expected, given=response.body)
121 finally:
121 finally:
122 fixture.destroy_repo(repo_name)
122 fixture.destroy_repo(repo_name)
123 if updates.get('_group'):
123 if updates.get('_group'):
124 fixture.destroy_repo_group(updates['_group'])
124 fixture.destroy_repo_group(updates['_group'])
125
125
126 def test_api_update_repo_fork_of_field(self, backend):
126 def test_api_update_repo_fork_of_field(self, backend):
127 master_repo = backend.create_repo()
127 master_repo = backend.create_repo()
128 repo = backend.create_repo()
128 repo = backend.create_repo()
129 updates = {
129 updates = {
130 'fork_of': master_repo.repo_name,
130 'fork_of': master_repo.repo_name,
131 'fork_of_id': master_repo.repo_id
131 'fork_of_id': master_repo.repo_id
132 }
132 }
133 expected_api_data = repo.get_api_data(include_secrets=True)
133 expected_api_data = repo.get_api_data(include_secrets=True)
134 expected_api_data.update(updates)
134 expected_api_data.update(updates)
135
135
136 id_, params = build_data(
136 id_, params = build_data(
137 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
137 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
138 response = api_call(self.app, params)
138 response = api_call(self.app, params)
139 expected = {
139 expected = {
140 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
140 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
141 'repository': jsonify(expected_api_data)
141 'repository': jsonify(expected_api_data)
142 }
142 }
143 assert_ok(id_, expected, given=response.body)
143 assert_ok(id_, expected, given=response.body)
144 result = response.json['result']['repository']
144 result = response.json['result']['repository']
145 assert result['fork_of'] == master_repo.repo_name
145 assert result['fork_of'] == master_repo.repo_name
146 assert result['fork_of_id'] == master_repo.repo_id
146 assert result['fork_of_id'] == master_repo.repo_id
147
147
148 def test_api_update_repo_fork_of_not_found(self, backend):
148 def test_api_update_repo_fork_of_not_found(self, backend):
149 master_repo_name = 'fake-parent-repo'
149 master_repo_name = 'fake-parent-repo'
150 repo = backend.create_repo()
150 repo = backend.create_repo()
151 updates = {
151 updates = {
152 'fork_of': master_repo_name
152 'fork_of': master_repo_name
153 }
153 }
154 id_, params = build_data(
154 id_, params = build_data(
155 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
155 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
156 response = api_call(self.app, params)
156 response = api_call(self.app, params)
157 expected = {
157 expected = {
158 'repo_fork_of': 'Fork with id `{}` does not exists'.format(
158 'repo_fork_of': 'Fork with id `{}` does not exists'.format(
159 master_repo_name)}
159 master_repo_name)}
160 assert_error(id_, expected, given=response.body)
160 assert_error(id_, expected, given=response.body)
161
161
162 def test_api_update_repo_with_repo_group_not_existing(self):
162 def test_api_update_repo_with_repo_group_not_existing(self):
163 repo_name = 'admin_owned'
163 repo_name = 'admin_owned'
164 fake_repo_group = 'test_group_for_update'
164 fake_repo_group = 'test_group_for_update'
165 fixture.create_repo(repo_name)
165 fixture.create_repo(repo_name)
166 updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)}
166 updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)}
167 id_, params = build_data(
167 id_, params = build_data(
168 self.apikey, 'update_repo', repoid=repo_name, **updates)
168 self.apikey, 'update_repo', repoid=repo_name, **updates)
169 response = api_call(self.app, params)
169 response = api_call(self.app, params)
170 try:
170 try:
171 expected = {
171 expected = {
172 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group)
172 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group)
173 }
173 }
174 assert_error(id_, expected, given=response.body)
174 assert_error(id_, expected, given=response.body)
175 finally:
175 finally:
176 fixture.destroy_repo(repo_name)
176 fixture.destroy_repo(repo_name)
177
177
178 def test_api_update_repo_regular_user_not_allowed(self):
178 def test_api_update_repo_regular_user_not_allowed(self):
179 repo_name = 'admin_owned'
179 repo_name = 'admin_owned'
180 fixture.create_repo(repo_name)
180 fixture.create_repo(repo_name)
181 updates = {'active': False}
181 updates = {'active': False}
182 id_, params = build_data(
182 id_, params = build_data(
183 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
183 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
184 response = api_call(self.app, params)
184 response = api_call(self.app, params)
185 try:
185 try:
186 expected = 'repository `%s` does not exist' % (repo_name,)
186 expected = 'repository `%s` does not exist' % (repo_name,)
187 assert_error(id_, expected, given=response.body)
187 assert_error(id_, expected, given=response.body)
188 finally:
188 finally:
189 fixture.destroy_repo(repo_name)
189 fixture.destroy_repo(repo_name)
190
190
191 @mock.patch.object(RepoModel, 'update', crash)
191 @mock.patch.object(RepoModel, 'update', crash)
192 def test_api_update_repo_exception_occurred(self, backend):
192 def test_api_update_repo_exception_occurred(self, backend):
193 repo_name = UPDATE_REPO_NAME
193 repo_name = UPDATE_REPO_NAME
194 fixture.create_repo(repo_name, repo_type=backend.alias)
194 fixture.create_repo(repo_name, repo_type=backend.alias)
195 id_, params = build_data(
195 id_, params = build_data(
196 self.apikey, 'update_repo', repoid=repo_name,
196 self.apikey, 'update_repo', repoid=repo_name,
197 owner=TEST_USER_ADMIN_LOGIN,)
197 owner=TEST_USER_ADMIN_LOGIN,)
198 response = api_call(self.app, params)
198 response = api_call(self.app, params)
199 try:
199 try:
200 expected = 'failed to update repo `%s`' % (repo_name,)
200 expected = 'failed to update repo `%s`' % (repo_name,)
201 assert_error(id_, expected, given=response.body)
201 assert_error(id_, expected, given=response.body)
202 finally:
202 finally:
203 fixture.destroy_repo(repo_name)
203 fixture.destroy_repo(repo_name)
@@ -1,62 +1,70 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2018 RhodeCode GmbH
3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import pytest
22 import pytest
23 from pyramid.compat import configparser
23 from pyramid.compat import configparser
24
24
25 from rhodecode.apps.ssh_support.lib.ssh_wrapper import SshWrapper
25 from rhodecode.apps.ssh_support.lib.ssh_wrapper import SshWrapper
26 from rhodecode.lib.utils2 import AttributeDict
26 from rhodecode.lib.utils2 import AttributeDict
27
27
28
28
29 @pytest.fixture
29 @pytest.fixture
30 def dummy_conf_file(tmpdir):
30 def dummy_conf_file(tmpdir):
31 conf = configparser.ConfigParser()
31 conf = configparser.ConfigParser()
32 conf.add_section('app:main')
32 conf.add_section('app:main')
33 conf.set('app:main', 'ssh.executable.hg', '/usr/bin/hg')
33 conf.set('app:main', 'ssh.executable.hg', '/usr/bin/hg')
34 conf.set('app:main', 'ssh.executable.git', '/usr/bin/git')
34 conf.set('app:main', 'ssh.executable.git', '/usr/bin/git')
35 conf.set('app:main', 'ssh.executable.svn', '/usr/bin/svnserve')
35 conf.set('app:main', 'ssh.executable.svn', '/usr/bin/svnserve')
36
36
37 f_path = os.path.join(str(tmpdir), 'ssh_wrapper_test.ini')
37 f_path = os.path.join(str(tmpdir), 'ssh_wrapper_test.ini')
38 with open(f_path, 'wb') as f:
38 with open(f_path, 'wb') as f:
39 conf.write(f)
39 conf.write(f)
40
40
41 return os.path.join(f_path)
41 return os.path.join(f_path)
42
42
43
43
44 @pytest.fixture
45 def dummy_env():
46 return {
47 'request':
48 AttributeDict(host_url='http://localhost', script_name='/')
49 }
50
51
52 @pytest.fixture
53 def dummy_user():
54 return AttributeDict(username='test_user')
44 def plain_dummy_env():
45 return {
46 'request':
47 AttributeDict(host_url='http://localhost', script_name='/')
48 }
49
50
51 @pytest.fixture
52 def dummy_env():
53 return plain_dummy_env()
54
55
56 def plain_dummy_user():
57 return AttributeDict(username='test_user')
58
59
60 @pytest.fixture
61 def dummy_user():
62 return plain_dummy_user()
55
63
56
64
57 @pytest.fixture
65 @pytest.fixture
58 def ssh_wrapper(app, dummy_conf_file, dummy_env):
66 def ssh_wrapper(app, dummy_conf_file, dummy_env):
59 conn_info = '127.0.0.1 22 10.0.0.1 443'
67 conn_info = '127.0.0.1 22 10.0.0.1 443'
60 return SshWrapper(
68 return SshWrapper(
61 'random command', conn_info, 'auto', 'admin', '1', key_id='1',
69 'random command', conn_info, 'auto', 'admin', '1', key_id='1',
62 shell=False, ini_path=dummy_conf_file, env=dummy_env)
70 shell=False, ini_path=dummy_conf_file, env=dummy_env)
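The reason for the plain helpers becomes visible in the `*ServerCreator` classes in the test modules that follow: they need the dummy user and environment at class-definition and object-construction time, where a pytest fixture cannot be requested. A rough sketch of that consumer side, assuming the conftest above is importable (the class name here is hypothetical):

```python
# Sketch only: condensed from the GitServerCreator / MercurialServerCreator pattern below.
from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user


class DummyServerCreator(object):
    # Evaluated when the class body executes, so a fixture could not be used here.
    user = plain_dummy_user()

    def create(self, **kwargs):
        parameters = {'user': self.user, 'env': plain_dummy_env()}
        parameters.update(kwargs)
        return parameters
```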
@@ -1,152 +1,152 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2018 RhodeCode GmbH
3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import mock
22 import mock
23 import pytest
23 import pytest
24
24
25 from rhodecode.apps.ssh_support.lib.backends.git import GitServer
25 from rhodecode.apps.ssh_support.lib.backends.git import GitServer
26 from rhodecode.apps.ssh_support.tests.conftest import dummy_env, dummy_user
26 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
27
27
28
28
29 class GitServerCreator(object):
29 class GitServerCreator(object):
30 root = '/tmp/repo/path/'
30 root = '/tmp/repo/path/'
31 git_path = '/usr/local/bin/git'
31 git_path = '/usr/local/bin/git'
32 config_data = {
32 config_data = {
33 'app:main': {
33 'app:main': {
34 'ssh.executable.git': git_path,
34 'ssh.executable.git': git_path,
35 'vcs.hooks.protocol': 'http',
35 'vcs.hooks.protocol': 'http',
36 }
36 }
37 }
37 }
38 repo_name = 'test_git'
38 repo_name = 'test_git'
39 repo_mode = 'receive-pack'
39 repo_mode = 'receive-pack'
40 user = dummy_user()
40 user = plain_dummy_user()
41
41
42 def __init__(self):
42 def __init__(self):
43 def config_get(part, key):
43 def config_get(part, key):
44 return self.config_data.get(part, {}).get(key)
44 return self.config_data.get(part, {}).get(key)
45 self.config_mock = mock.Mock()
45 self.config_mock = mock.Mock()
46 self.config_mock.get = mock.Mock(side_effect=config_get)
46 self.config_mock.get = mock.Mock(side_effect=config_get)
47
47
48 def create(self, **kwargs):
48 def create(self, **kwargs):
49 parameters = {
49 parameters = {
50 'store': self.root,
50 'store': self.root,
51 'ini_path': '',
51 'ini_path': '',
52 'user': self.user,
52 'user': self.user,
53 'repo_name': self.repo_name,
53 'repo_name': self.repo_name,
54 'repo_mode': self.repo_mode,
54 'repo_mode': self.repo_mode,
55 'user_permissions': {
55 'user_permissions': {
56 self.repo_name: 'repository.admin'
56 self.repo_name: 'repository.admin'
57 },
57 },
58 'config': self.config_mock,
58 'config': self.config_mock,
59 'env': dummy_env()
59 'env': plain_dummy_env()
60 }
60 }
61 parameters.update(kwargs)
61 parameters.update(kwargs)
62 server = GitServer(**parameters)
62 server = GitServer(**parameters)
63 return server
63 return server
64
64
65
65
66 @pytest.fixture
66 @pytest.fixture
67 def git_server(app):
67 def git_server(app):
68 return GitServerCreator()
68 return GitServerCreator()
69
69
70
70
71 class TestGitServer(object):
71 class TestGitServer(object):
72
72
73 def test_command(self, git_server):
73 def test_command(self, git_server):
74 server = git_server.create()
74 server = git_server.create()
75 expected_command = (
75 expected_command = (
76 'cd {root}; {git_path} {repo_mode} \'{root}{repo_name}\''.format(
76 'cd {root}; {git_path} {repo_mode} \'{root}{repo_name}\''.format(
77 root=git_server.root, git_path=git_server.git_path,
77 root=git_server.root, git_path=git_server.git_path,
78 repo_mode=git_server.repo_mode, repo_name=git_server.repo_name)
78 repo_mode=git_server.repo_mode, repo_name=git_server.repo_name)
79 )
79 )
80 assert expected_command == server.tunnel.command()
80 assert expected_command == server.tunnel.command()
81
81
82 @pytest.mark.parametrize('permissions, action, code', [
82 @pytest.mark.parametrize('permissions, action, code', [
83 ({}, 'pull', -2),
83 ({}, 'pull', -2),
84 ({'test_git': 'repository.read'}, 'pull', 0),
84 ({'test_git': 'repository.read'}, 'pull', 0),
85 ({'test_git': 'repository.read'}, 'push', -2),
85 ({'test_git': 'repository.read'}, 'push', -2),
86 ({'test_git': 'repository.write'}, 'push', 0),
86 ({'test_git': 'repository.write'}, 'push', 0),
87 ({'test_git': 'repository.admin'}, 'push', 0),
87 ({'test_git': 'repository.admin'}, 'push', 0),
88
88
89 ])
89 ])
90 def test_permission_checks(self, git_server, permissions, action, code):
90 def test_permission_checks(self, git_server, permissions, action, code):
91 server = git_server.create(user_permissions=permissions)
91 server = git_server.create(user_permissions=permissions)
92 result = server._check_permissions(action)
92 result = server._check_permissions(action)
93 assert result is code
93 assert result is code
94
94
95 @pytest.mark.parametrize('permissions, value', [
95 @pytest.mark.parametrize('permissions, value', [
96 ({}, False),
96 ({}, False),
97 ({'test_git': 'repository.read'}, False),
97 ({'test_git': 'repository.read'}, False),
98 ({'test_git': 'repository.write'}, True),
98 ({'test_git': 'repository.write'}, True),
99 ({'test_git': 'repository.admin'}, True),
99 ({'test_git': 'repository.admin'}, True),
100
100
101 ])
101 ])
102 def test_has_write_permissions(self, git_server, permissions, value):
102 def test_has_write_permissions(self, git_server, permissions, value):
103 server = git_server.create(user_permissions=permissions)
103 server = git_server.create(user_permissions=permissions)
104 result = server.has_write_perm()
104 result = server.has_write_perm()
105 assert result is value
105 assert result is value
106
106
107 def test_run_returns_executes_command(self, git_server):
107 def test_run_returns_executes_command(self, git_server):
108 server = git_server.create()
108 server = git_server.create()
109 from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper
109 from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper
110 with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
110 with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
111 _patch.return_value = 0
111 _patch.return_value = 0
112 with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
112 with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
113 exit_code = server.run()
113 exit_code = server.run()
114
114
115 assert exit_code == (0, False)
115 assert exit_code == (0, False)
116
116
117 @pytest.mark.parametrize(
117 @pytest.mark.parametrize(
118 'repo_mode, action', [
118 'repo_mode, action', [
119 ['receive-pack', 'push'],
119 ['receive-pack', 'push'],
120 ['upload-pack', 'pull']
120 ['upload-pack', 'pull']
121 ])
121 ])
122 def test_update_environment(self, git_server, repo_mode, action):
122 def test_update_environment(self, git_server, repo_mode, action):
123 server = git_server.create(repo_mode=repo_mode)
123 server = git_server.create(repo_mode=repo_mode)
124 store = server.store
124 store = server.store
125
125
126 with mock.patch('os.environ', {'SSH_CLIENT': '10.10.10.10 b'}):
126 with mock.patch('os.environ', {'SSH_CLIENT': '10.10.10.10 b'}):
127 with mock.patch('os.putenv') as putenv_mock:
127 with mock.patch('os.putenv') as putenv_mock:
128 server.update_environment(action)
128 server.update_environment(action)
129
129
130 expected_data = {
130 expected_data = {
131 'username': git_server.user.username,
131 'username': git_server.user.username,
132 'user_id': git_server.user.user_id,
132 'user_id': git_server.user.user_id,
133 'scm': 'git',
133 'scm': 'git',
134 'repository': git_server.repo_name,
134 'repository': git_server.repo_name,
135 'make_lock': None,
135 'make_lock': None,
136 'action': action,
136 'action': action,
137 'ip': '10.10.10.10',
137 'ip': '10.10.10.10',
138 'locked_by': [None, None],
138 'locked_by': [None, None],
139 'config': '',
139 'config': '',
140 'repo_store': store,
140 'repo_store': store,
141 'server_url': None,
141 'server_url': None,
142 'hooks': ['push', 'pull'],
142 'hooks': ['push', 'pull'],
143 'is_shadow_repo': False,
143 'is_shadow_repo': False,
144 'hooks_module': 'rhodecode.lib.hooks_daemon',
144 'hooks_module': 'rhodecode.lib.hooks_daemon',
145 'check_branch_perms': False,
145 'check_branch_perms': False,
146 'detect_force_push': False,
146 'detect_force_push': False,
147 'user_agent': u'ssh-user-agent',
147 'user_agent': u'ssh-user-agent',
148 'SSH': True,
148 'SSH': True,
149 'SSH_PERMISSIONS': 'repository.admin',
149 'SSH_PERMISSIONS': 'repository.admin',
150 }
150 }
151 args, kwargs = putenv_mock.call_args
151 args, kwargs = putenv_mock.call_args
152 assert json.loads(args[1]) == expected_data
152 assert json.loads(args[1]) == expected_data
@@ -1,116 +1,116 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2018 RhodeCode GmbH
3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialServer
24 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialServer
25 from rhodecode.apps.ssh_support.tests.conftest import dummy_env, dummy_user
25 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
26
26
27
27
28 class MercurialServerCreator(object):
28 class MercurialServerCreator(object):
29 root = '/tmp/repo/path/'
29 root = '/tmp/repo/path/'
30 hg_path = '/usr/local/bin/hg'
30 hg_path = '/usr/local/bin/hg'
31
31
32 config_data = {
32 config_data = {
33 'app:main': {
33 'app:main': {
34 'ssh.executable.hg': hg_path,
34 'ssh.executable.hg': hg_path,
35 'vcs.hooks.protocol': 'http',
35 'vcs.hooks.protocol': 'http',
36 }
36 }
37 }
37 }
38 repo_name = 'test_hg'
38 repo_name = 'test_hg'
39 user = dummy_user()
39 user = plain_dummy_user()
40
40
41 def __init__(self):
41 def __init__(self):
42 def config_get(part, key):
42 def config_get(part, key):
43 return self.config_data.get(part, {}).get(key)
43 return self.config_data.get(part, {}).get(key)
44 self.config_mock = mock.Mock()
44 self.config_mock = mock.Mock()
45 self.config_mock.get = mock.Mock(side_effect=config_get)
45 self.config_mock.get = mock.Mock(side_effect=config_get)
46
46
47 def create(self, **kwargs):
47 def create(self, **kwargs):
48 parameters = {
48 parameters = {
49 'store': self.root,
49 'store': self.root,
50 'ini_path': '',
50 'ini_path': '',
51 'user': self.user,
51 'user': self.user,
52 'repo_name': self.repo_name,
52 'repo_name': self.repo_name,
53 'user_permissions': {
53 'user_permissions': {
54 'test_hg': 'repository.admin'
54 'test_hg': 'repository.admin'
55 },
55 },
56 'config': self.config_mock,
56 'config': self.config_mock,
57 'env': dummy_env()
57 'env': plain_dummy_env()
58 }
58 }
59 parameters.update(kwargs)
59 parameters.update(kwargs)
60 server = MercurialServer(**parameters)
60 server = MercurialServer(**parameters)
61 return server
61 return server
62
62
63
63
64 @pytest.fixture
64 @pytest.fixture
65 def hg_server(app):
65 def hg_server(app):
66 return MercurialServerCreator()
66 return MercurialServerCreator()
67
67
68
68
69 class TestMercurialServer(object):
69 class TestMercurialServer(object):
70
70
71 def test_command(self, hg_server):
71 def test_command(self, hg_server):
72 server = hg_server.create()
72 server = hg_server.create()
73 expected_command = (
73 expected_command = (
74 'cd {root}; {hg_path} -R {root}{repo_name} serve --stdio'.format(
74 'cd {root}; {hg_path} -R {root}{repo_name} serve --stdio'.format(
75 root=hg_server.root, hg_path=hg_server.hg_path,
75 root=hg_server.root, hg_path=hg_server.hg_path,
76 repo_name=hg_server.repo_name)
76 repo_name=hg_server.repo_name)
77 )
77 )
78 assert expected_command == server.tunnel.command()
78 assert expected_command == server.tunnel.command()
79
79
80 @pytest.mark.parametrize('permissions, action, code', [
80 @pytest.mark.parametrize('permissions, action, code', [
81 ({}, 'pull', -2),
81 ({}, 'pull', -2),
82 ({'test_hg': 'repository.read'}, 'pull', 0),
82 ({'test_hg': 'repository.read'}, 'pull', 0),
83 ({'test_hg': 'repository.read'}, 'push', -2),
83 ({'test_hg': 'repository.read'}, 'push', -2),
84 ({'test_hg': 'repository.write'}, 'push', 0),
84 ({'test_hg': 'repository.write'}, 'push', 0),
85 ({'test_hg': 'repository.admin'}, 'push', 0),
85 ({'test_hg': 'repository.admin'}, 'push', 0),
86
86
87 ])
87 ])
88 def test_permission_checks(self, hg_server, permissions, action, code):
88 def test_permission_checks(self, hg_server, permissions, action, code):
89 server = hg_server.create(user_permissions=permissions)
89 server = hg_server.create(user_permissions=permissions)
90 result = server._check_permissions(action)
90 result = server._check_permissions(action)
91 assert result is code
91 assert result is code
92
92
93 @pytest.mark.parametrize('permissions, value', [
93 @pytest.mark.parametrize('permissions, value', [
94 ({}, False),
94 ({}, False),
95 ({'test_hg': 'repository.read'}, False),
95 ({'test_hg': 'repository.read'}, False),
96 ({'test_hg': 'repository.write'}, True),
96 ({'test_hg': 'repository.write'}, True),
97 ({'test_hg': 'repository.admin'}, True),
97 ({'test_hg': 'repository.admin'}, True),
98
98
99 ])
99 ])
100 def test_has_write_permissions(self, hg_server, permissions, value):
100 def test_has_write_permissions(self, hg_server, permissions, value):
101 server = hg_server.create(user_permissions=permissions)
101 server = hg_server.create(user_permissions=permissions)
102 result = server.has_write_perm()
102 result = server.has_write_perm()
103 assert result is value
103 assert result is value
104
104
105 def test_run_returns_executes_command(self, hg_server):
105 def test_run_returns_executes_command(self, hg_server):
106 server = hg_server.create()
106 server = hg_server.create()
107 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialTunnelWrapper
107 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialTunnelWrapper
108 with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch:
108 with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch:
109 _patch.return_value = 0
109 _patch.return_value = 0
110 with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'):
110 with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'):
111 exit_code = server.run()
111 exit_code = server.run()
112
112
113 assert exit_code == (0, False)
113 assert exit_code == (0, False)
114
114
115
115
116
116
@@ -1,124 +1,124 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2018 RhodeCode GmbH
3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionServer
24 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionServer
25 from rhodecode.apps.ssh_support.tests.conftest import dummy_env, dummy_user
25 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
26
26
27
27
28 class SubversionServerCreator(object):
28 class SubversionServerCreator(object):
29 root = '/tmp/repo/path/'
29 root = '/tmp/repo/path/'
30 svn_path = '/usr/local/bin/svnserve'
30 svn_path = '/usr/local/bin/svnserve'
31 config_data = {
31 config_data = {
32 'app:main': {
32 'app:main': {
33 'ssh.executable.svn': svn_path,
33 'ssh.executable.svn': svn_path,
34 'vcs.hooks.protocol': 'http',
34 'vcs.hooks.protocol': 'http',
35 }
35 }
36 }
36 }
37 repo_name = 'test-svn'
37 repo_name = 'test-svn'
38 user = dummy_user()
38 user = plain_dummy_user()
39
39
40 def __init__(self):
40 def __init__(self):
41 def config_get(part, key):
41 def config_get(part, key):
42 return self.config_data.get(part, {}).get(key)
42 return self.config_data.get(part, {}).get(key)
43 self.config_mock = mock.Mock()
43 self.config_mock = mock.Mock()
44 self.config_mock.get = mock.Mock(side_effect=config_get)
44 self.config_mock.get = mock.Mock(side_effect=config_get)
45
45
46 def create(self, **kwargs):
46 def create(self, **kwargs):
47 parameters = {
47 parameters = {
48 'store': self.root,
48 'store': self.root,
49 'repo_name': self.repo_name,
49 'repo_name': self.repo_name,
50 'ini_path': '',
50 'ini_path': '',
51 'user': self.user,
51 'user': self.user,
52 'user_permissions': {
52 'user_permissions': {
53 self.repo_name: 'repository.admin'
53 self.repo_name: 'repository.admin'
54 },
54 },
55 'config': self.config_mock,
55 'config': self.config_mock,
56 'env': dummy_env()
56 'env': plain_dummy_env()
57 }
57 }
58
58
59 parameters.update(kwargs)
59 parameters.update(kwargs)
60 server = SubversionServer(**parameters)
60 server = SubversionServer(**parameters)
61 return server
61 return server
62
62
63
63
64 @pytest.fixture
64 @pytest.fixture
65 def svn_server(app):
65 def svn_server(app):
66 return SubversionServerCreator()
66 return SubversionServerCreator()
67
67
68
68
69 class TestSubversionServer(object):
69 class TestSubversionServer(object):
70 def test_command(self, svn_server):
70 def test_command(self, svn_server):
71 server = svn_server.create()
71 server = svn_server.create()
72 expected_command = [
72 expected_command = [
73 svn_server.svn_path, '-t', '--config-file',
73 svn_server.svn_path, '-t', '--config-file',
74 server.tunnel.svn_conf_path, '-r', svn_server.root
74 server.tunnel.svn_conf_path, '-r', svn_server.root
75 ]
75 ]
76
76
77 assert expected_command == server.tunnel.command()
77 assert expected_command == server.tunnel.command()
78
78
79 @pytest.mark.parametrize('permissions, action, code', [
79 @pytest.mark.parametrize('permissions, action, code', [
80 ({}, 'pull', -2),
80 ({}, 'pull', -2),
81 ({'test-svn': 'repository.read'}, 'pull', 0),
81 ({'test-svn': 'repository.read'}, 'pull', 0),
82 ({'test-svn': 'repository.read'}, 'push', -2),
82 ({'test-svn': 'repository.read'}, 'push', -2),
83 ({'test-svn': 'repository.write'}, 'push', 0),
83 ({'test-svn': 'repository.write'}, 'push', 0),
84 ({'test-svn': 'repository.admin'}, 'push', 0),
84 ({'test-svn': 'repository.admin'}, 'push', 0),
85
85
86 ])
86 ])
87 def test_permission_checks(self, svn_server, permissions, action, code):
87 def test_permission_checks(self, svn_server, permissions, action, code):
88 server = svn_server.create(user_permissions=permissions)
88 server = svn_server.create(user_permissions=permissions)
89 result = server._check_permissions(action)
89 result = server._check_permissions(action)
90 assert result is code
90 assert result is code
91
91
92 def test_run_returns_executes_command(self, svn_server):
92 def test_run_returns_executes_command(self, svn_server):
93 server = svn_server.create()
93 server = svn_server.create()
94 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
94 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
95 with mock.patch.object(
95 with mock.patch.object(
96 SubversionTunnelWrapper, 'get_first_client_response',
96 SubversionTunnelWrapper, 'get_first_client_response',
97 return_value={'url': 'http://server/test-svn'}):
97 return_value={'url': 'http://server/test-svn'}):
98 with mock.patch.object(
98 with mock.patch.object(
99 SubversionTunnelWrapper, 'patch_first_client_response',
99 SubversionTunnelWrapper, 'patch_first_client_response',
100 return_value=0):
100 return_value=0):
101 with mock.patch.object(
101 with mock.patch.object(
102 SubversionTunnelWrapper, 'sync',
102 SubversionTunnelWrapper, 'sync',
103 return_value=0):
103 return_value=0):
104 with mock.patch.object(
104 with mock.patch.object(
105 SubversionTunnelWrapper, 'command',
105 SubversionTunnelWrapper, 'command',
106 return_value=['date']):
106 return_value=['date']):
107
107
108 exit_code = server.run()
108 exit_code = server.run()
109 # SVN has this differently configured, and we get in our mock env
109 # SVN has this differently configured, and we get in our mock env
110 # None as return code
110 # None as return code
111 assert exit_code == (None, False)
111 assert exit_code == (None, False)
112
112
113 def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server):
113 def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server):
114 server = svn_server.create()
114 server = svn_server.create()
115 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
115 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
116 with mock.patch.object(
116 with mock.patch.object(
117 SubversionTunnelWrapper, 'command',
117 SubversionTunnelWrapper, 'command',
118 return_value=['date']):
118 return_value=['date']):
119 with mock.patch.object(
119 with mock.patch.object(
120 SubversionTunnelWrapper, 'get_first_client_response',
120 SubversionTunnelWrapper, 'get_first_client_response',
121 return_value=None):
121 return_value=None):
122 exit_code = server.run()
122 exit_code = server.run()
123
123
124 assert exit_code == (1, False)
124 assert exit_code == (1, False)
@@ -1,245 +1,245 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import time
22 import time
23 import logging
23 import logging
24 import datetime
24 import datetime
25 import hashlib
25 import hashlib
26 import tempfile
26 import tempfile
27 from os.path import join as jn
27 from os.path import join as jn
28
28
29 from tempfile import _RandomNameSequence
29 from tempfile import _RandomNameSequence
30
30
31 import pytest
31 import pytest
32
32
33 from rhodecode.model.db import User
33 from rhodecode.model.db import User
34 from rhodecode.lib import auth
34 from rhodecode.lib import auth
35 from rhodecode.lib import helpers as h
35 from rhodecode.lib import helpers as h
36 from rhodecode.lib.helpers import flash, link_to
36 from rhodecode.lib.helpers import flash, link_to
37 from rhodecode.lib.utils2 import safe_str
37 from rhodecode.lib.utils2 import safe_str
38
38
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42 __all__ = [
42 __all__ = [
43 'get_new_dir', 'TestController',
43 'get_new_dir', 'TestController',
44 'link_to', 'clear_cache_regions',
44 'link_to', 'clear_cache_regions',
45 'assert_session_flash', 'login_user', 'no_newline_id_generator',
45 'assert_session_flash', 'login_user', 'no_newline_id_generator',
46 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
46 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
47 'NEW_HG_REPO', 'NEW_GIT_REPO',
47 'NEW_HG_REPO', 'NEW_GIT_REPO',
48 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
48 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
49 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
49 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
50 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
50 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
51 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
51 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
52 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
52 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
53 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
53 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
54 ]
54 ]
55
55
56
56
57 # SOME GLOBALS FOR TESTS
57 # SOME GLOBALS FOR TESTS
58 TEST_DIR = tempfile.gettempdir()
58 TEST_DIR = tempfile.gettempdir()
59
59
60 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_%s' % _RandomNameSequence().next())
60 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_%s' % _RandomNameSequence().next())
61 TEST_USER_ADMIN_LOGIN = 'test_admin'
61 TEST_USER_ADMIN_LOGIN = 'test_admin'
62 TEST_USER_ADMIN_PASS = 'test12'
62 TEST_USER_ADMIN_PASS = 'test12'
63 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
63 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
64
64
65 TEST_USER_REGULAR_LOGIN = 'test_regular'
65 TEST_USER_REGULAR_LOGIN = 'test_regular'
66 TEST_USER_REGULAR_PASS = 'test12'
66 TEST_USER_REGULAR_PASS = 'test12'
67 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
67 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
68
68
69 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
69 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
70 TEST_USER_REGULAR2_PASS = 'test12'
70 TEST_USER_REGULAR2_PASS = 'test12'
71 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
71 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
72
72
73 HG_REPO = 'vcs_test_hg'
73 HG_REPO = 'vcs_test_hg'
74 GIT_REPO = 'vcs_test_git'
74 GIT_REPO = 'vcs_test_git'
75 SVN_REPO = 'vcs_test_svn'
75 SVN_REPO = 'vcs_test_svn'
76
76
77 NEW_HG_REPO = 'vcs_test_hg_new'
77 NEW_HG_REPO = 'vcs_test_hg_new'
78 NEW_GIT_REPO = 'vcs_test_git_new'
78 NEW_GIT_REPO = 'vcs_test_git_new'
79
79
80 HG_FORK = 'vcs_test_hg_fork'
80 HG_FORK = 'vcs_test_hg_fork'
81 GIT_FORK = 'vcs_test_git_fork'
81 GIT_FORK = 'vcs_test_git_fork'
82
82
83 ## VCS
83 ## VCS
84 SCM_TESTS = ['hg', 'git']
84 SCM_TESTS = ['hg', 'git']
85 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
85 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
86
86
87 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
87 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
88 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
88 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
89 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
89 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
90
90
91 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
91 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
92 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
92 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
93 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
93 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
94
94
95 TEST_REPO_PREFIX = 'vcs-test'
95 TEST_REPO_PREFIX = 'vcs-test'
96
96
97
97
98 def clear_cache_regions(regions=None):
98 def clear_cache_regions(regions=None):
99 # dogpile
99 # dogpile
100 from rhodecode.lib.rc_cache import region_meta
100 from rhodecode.lib.rc_cache import region_meta
101 for region_name, region in region_meta.dogpile_cache_regions.items():
101 for region_name, region in region_meta.dogpile_cache_regions.items():
102 if not regions or region_name in regions:
102 if not regions or region_name in regions:
103 region.invalidate()
103 region.invalidate()
104
104
105
105
106 def get_new_dir(title):
106 def get_new_dir(title):
107 """
107 """
108 Always returns a new directory path.
108 Always returns a new directory path.
109 """
109 """
110 from rhodecode.tests.vcs.utils import get_normalized_path
110 from rhodecode.tests.vcs.utils import get_normalized_path
111 name_parts = [TEST_REPO_PREFIX]
111 name_parts = [TEST_REPO_PREFIX]
112 if title:
112 if title:
113 name_parts.append(title)
113 name_parts.append(title)
114 hex_str = hashlib.sha1('%s %s' % (os.getpid(), time.time())).hexdigest()
114 hex_str = hashlib.sha1('%s %s' % (os.getpid(), time.time())).hexdigest()
115 name_parts.append(hex_str)
115 name_parts.append(hex_str)
116 name = '-'.join(name_parts)
116 name = '-'.join(name_parts)
117 path = os.path.join(TEST_DIR, name)
117 path = os.path.join(TEST_DIR, name)
118 return get_normalized_path(path)
118 return get_normalized_path(path)
119
119
120
120
121 def repo_id_generator(name):
121 def repo_id_generator(name):
122 numeric_hash = 0
122 numeric_hash = 0
123 for char in name:
123 for char in name:
124 numeric_hash += (ord(char))
124 numeric_hash += (ord(char))
125 return numeric_hash
125 return numeric_hash
126
126
127
127
128 @pytest.mark.usefixtures('app', 'index_location')
128 @pytest.mark.usefixtures('app', 'index_location')
129 class TestController(object):
129 class TestController(object):
130
130
131 maxDiff = None
131 maxDiff = None
132
132
133 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
133 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
134 password=TEST_USER_ADMIN_PASS):
134 password=TEST_USER_ADMIN_PASS):
135 self._logged_username = username
135 self._logged_username = username
136 self._session = login_user_session(self.app, username, password)
136 self._session = login_user_session(self.app, username, password)
137 self.csrf_token = auth.get_csrf_token(self._session)
137 self.csrf_token = auth.get_csrf_token(self._session)
138
138
139 return self._session['rhodecode_user']
139 return self._session['rhodecode_user']
140
140
141 def logout_user(self):
141 def logout_user(self):
142 logout_user_session(self.app, auth.get_csrf_token(self._session))
142 logout_user_session(self.app, auth.get_csrf_token(self._session))
143 self.csrf_token = None
143 self.csrf_token = None
144 self._logged_username = None
144 self._logged_username = None
145 self._session = None
145 self._session = None
146
146
147 def _get_logged_user(self):
147 def _get_logged_user(self):
148 return User.get_by_username(self._logged_username)
148 return User.get_by_username(self._logged_username)
149
149
150
150
151 def login_user_session(
151 def login_user_session(
152 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
152 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
153
153
154 response = app.post(
154 response = app.post(
155 h.route_path('login'),
155 h.route_path('login'),
156 {'username': username, 'password': password})
156 {'username': username, 'password': password})
157 if 'invalid user name' in response.body:
157 if 'invalid user name' in response.body:
158 pytest.fail('could not login using %s %s' % (username, password))
158 pytest.fail('could not login using %s %s' % (username, password))
159
159
160 assert response.status == '302 Found'
160 assert response.status == '302 Found'
161 response = response.follow()
161 response = response.follow()
162 assert response.status == '200 OK'
162 assert response.status == '200 OK'
163
163
164 session = response.get_session_from_response()
164 session = response.get_session_from_response()
165 assert 'rhodecode_user' in session
165 assert 'rhodecode_user' in session
166 rc_user = session['rhodecode_user']
166 rc_user = session['rhodecode_user']
167 assert rc_user.get('username') == username
167 assert rc_user.get('username') == username
168 assert rc_user.get('is_authenticated')
168 assert rc_user.get('is_authenticated')
169
169
170 return session
170 return session
171
171
172
172
173 def logout_user_session(app, csrf_token):
173 def logout_user_session(app, csrf_token):
174 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
174 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
175
175
176
176
177 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
177 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
178 password=TEST_USER_ADMIN_PASS):
178 password=TEST_USER_ADMIN_PASS):
179 return login_user_session(app, username, password)['rhodecode_user']
179 return login_user_session(app, username, password)['rhodecode_user']
180
180
181
181
182 def assert_session_flash(response, msg=None, category=None, no_=None):
182 def assert_session_flash(response, msg=None, category=None, no_=None):
183 """
183 """
184 Assert on a flash message in the current session.
184 Assert on a flash message in the current session.
185
185
186 :param response: Response from given call, it will contain flash
186 :param response: Response from given call, it will contain flash
187 messages or bound session with them.
187 messages or bound session with them.
188 :param msg: The expected message. Will be evaluated if a
188 :param msg: The expected message. Will be evaluated if a
189 :class:`LazyString` is passed in.
189 :class:`LazyString` is passed in.
190 :param category: Optional. If passed, the message category will be
190 :param category: Optional. If passed, the message category will be
191 checked as well.
191 checked as well.
192 :param no_: Optional. If passed, the message will be checked to NOT
192 :param no_: Optional. If passed, the message will be checked to NOT
193 be in the flash session
193 be in the flash session
194 """
194 """
195 if msg is None and no_ is None:
195 if msg is None and no_ is None:
196 raise ValueError("Parameter msg or no_ is required.")
196 raise ValueError("Parameter msg or no_ is required.")
197
197
198 if msg and no_:
198 if msg and no_:
199 raise ValueError("Please specify either msg or no_, but not both")
199 raise ValueError("Please specify either msg or no_, but not both")
200
200
201 session = response.get_session_from_response()
201 session = response.get_session_from_response()
202 messages = flash.pop_messages(session=session)
202 messages = flash.pop_messages(session=session)
203 msg = _eval_if_lazy(msg)
203 msg = _eval_if_lazy(msg)
204
204
205 if no_:
205 if no_:
206 error_msg = 'unable to detect no_ message `%s` in empty flash list' % no_
206 error_msg = 'unable to detect no_ message `%s` in empty flash list' % no_
207 else:
207 else:
208 error_msg = 'unable to find message `%s` in empty flash list' % msg
208 error_msg = 'unable to find message `%s` in empty flash list' % msg
209 assert messages, error_msg
209 assert messages, error_msg
210 message = messages[0]
210 message = messages[0]
211
211
212 message_text = _eval_if_lazy(message.message) or ''
212 message_text = _eval_if_lazy(message.message) or ''
213
213
214 if no_:
214 if no_:
215 if no_ in message_text:
215 if no_ in message_text:
216 msg = u'msg `%s` found in session flash.' % (no_,)
216 msg = u'msg `%s` found in session flash.' % (no_,)
217 pytest.fail(safe_str(msg))
217 pytest.fail(safe_str(msg))
218 else:
218 else:
219 if msg not in message_text:
219 if msg not in message_text:
220 fail_msg = u'msg `%s` not found in session ' \
220 fail_msg = u'msg `%s` not found in session ' \
221 u'flash: got `%s` (type:%s) instead' % (
221 u'flash: got `%s` (type:%s) instead' % (
222 msg, message_text, type(message_text))
222 msg, message_text, type(message_text))
223
223
224 pytest.fail(safe_str(fail_msg))
224 pytest.fail(safe_str(fail_msg))
225 if category:
225 if category:
226 assert category == message.category
226 assert category == message.category
227
227
228
228
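A hedged example of how `assert_session_flash` is meant to be called from a test; the route name, parameters and message texts below are placeholders, not values taken from this changeset:

def test_update_shows_flash(app, csrf_token):
    response = app.post(
        h.route_path('edit_repo', repo_name='some-repo'),  # hypothetical route/params
        {'csrf_token': csrf_token})
    # Positive match on the flash message plus its category.
    assert_session_flash(response, msg='Repository updated', category='success')
    # The `no_` form is used on a fresh response instead, e.g.:
    # assert_session_flash(other_response, no_='Error occurred')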
229 def _eval_if_lazy(value):
229 def _eval_if_lazy(value):
230 return value.eval() if hasattr(value, 'eval') else value
230 return value.eval() if hasattr(value, 'eval') else value
231
231
232
232
233 def no_newline_id_generator(test_name):
233 def no_newline_id_generator(test_name):
234 """
234 """
235 Generates a test name without spaces or newline characters. Used for
235 Generates a test name without spaces or newline characters. Used for
236 nicer output of test progress
236 nicer output of test progress
237 """
237 """
238 org_name = test_name
238 org_name = test_name
239 test_name = str(test_name)\
239 test_name = safe_str(test_name)\
240 .replace('\n', '_N') \
240 .replace('\n', '_N') \
241 .replace('\r', '_N') \
241 .replace('\r', '_N') \
242 .replace('\t', '_T') \
242 .replace('\t', '_T') \
243 .replace(' ', '_S')
243 .replace(' ', '_S')
244
244
245 return test_name or 'test-with-empty-name'
245 return test_name or 'test-with-empty-name'
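A small sketch of where `no_newline_id_generator` is typically plugged in, namely as the `ids` callback of a parametrized test so that multi-line parameter values still produce readable test IDs (the parameter values and test body are illustrative):

@pytest.mark.parametrize('text', [
    'first line\nsecond line',
    'tabbed\tvalue',
], ids=no_newline_id_generator)
def test_render_plain_text(text):
    # The generated ID never contains raw whitespace control characters.
    assert '\n' not in no_newline_id_generator(text)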
@@ -1,292 +1,292 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from subprocess32 import Popen, PIPE
21 from subprocess32 import Popen, PIPE
22 import os
22 import os
23 import shutil
23 import shutil
24 import sys
24 import sys
25 import tempfile
25 import tempfile
26
26
27 import pytest
27 import pytest
28 from sqlalchemy.engine import url
28 from sqlalchemy.engine import url
29
29
30 from rhodecode.tests.fixture import TestINI
30 from rhodecode.tests.fixture import TestINI
31
31
32
32
33 def _get_dbs_from_metafunc(metafunc):
33 def _get_dbs_from_metafunc(metafunc):
34 if hasattr(metafunc.function, 'dbs'):
34 if hasattr(metafunc.function, 'dbs'):
35 # Supported backends by this test function, created from
35 # Supported backends by this test function, created from
36 # pytest.mark.dbs
36 # pytest.mark.dbs
37 backends = metafunc.function.dbs.args
37 backends = metafunc.definition.get_closest_marker('dbs').args
38 else:
38 else:
39 backends = metafunc.config.getoption('--dbs')
39 backends = metafunc.config.getoption('--dbs')
40 return backends
40 return backends
41
41
42
42
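For context, a hedged sketch of the marker that `_get_dbs_from_metafunc` reads back: a test limits itself to specific database backends via `pytest.mark.dbs` and otherwise falls back to the `--dbs` command line option (the test body is illustrative):

@pytest.mark.dbs('postgres', 'mysql')
def test_upgrade_path(db_backend):
    # Only generated for the databases named in the marker, intersected with --dbs.
    db_backend.setup_rhodecode_db()
    stdout, _ = db_backend.upgrade_database()
    db_backend.assert_returncode_success()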
43 def pytest_generate_tests(metafunc):
43 def pytest_generate_tests(metafunc):
44 # Support test generation based on --dbs parameter
44 # Support test generation based on --dbs parameter
45 if 'db_backend' in metafunc.fixturenames:
45 if 'db_backend' in metafunc.fixturenames:
46 requested_backends = set(metafunc.config.getoption('--dbs'))
46 requested_backends = set(metafunc.config.getoption('--dbs'))
47 backends = _get_dbs_from_metafunc(metafunc)
47 backends = _get_dbs_from_metafunc(metafunc)
48 backends = requested_backends.intersection(backends)
48 backends = requested_backends.intersection(backends)
49 # TODO: johbo: Disabling a backend did not work out with
49 # TODO: johbo: Disabling a backend did not work out with
50 # parametrization, find better way to achieve this.
50 # parametrization, find better way to achieve this.
51 if not backends:
51 if not backends:
52 metafunc.function._skip = True
52 metafunc.function._skip = True
53 metafunc.parametrize('db_backend_name', backends)
53 metafunc.parametrize('db_backend_name', backends)
54
54
55
55
56 def pytest_collection_modifyitems(session, config, items):
56 def pytest_collection_modifyitems(session, config, items):
57 remaining = [
57 remaining = [
58 i for i in items if not getattr(i.obj, '_skip', False)]
58 i for i in items if not getattr(i.obj, '_skip', False)]
59 items[:] = remaining
59 items[:] = remaining
60
60
61
61
62 @pytest.fixture
62 @pytest.fixture
63 def db_backend(
63 def db_backend(
64 request, db_backend_name, ini_config, tmpdir_factory):
64 request, db_backend_name, ini_config, tmpdir_factory):
65 basetemp = tmpdir_factory.getbasetemp().strpath
65 basetemp = tmpdir_factory.getbasetemp().strpath
66 klass = _get_backend(db_backend_name)
66 klass = _get_backend(db_backend_name)
67
67
68 option_name = '--{}-connection-string'.format(db_backend_name)
68 option_name = '--{}-connection-string'.format(db_backend_name)
69 connection_string = request.config.getoption(option_name) or None
69 connection_string = request.config.getoption(option_name) or None
70
70
71 return klass(
71 return klass(
72 config_file=ini_config, basetemp=basetemp,
72 config_file=ini_config, basetemp=basetemp,
73 connection_string=connection_string)
73 connection_string=connection_string)
74
74
75
75
76 def _get_backend(backend_type):
76 def _get_backend(backend_type):
77 return {
77 return {
78 'sqlite': SQLiteDBBackend,
78 'sqlite': SQLiteDBBackend,
79 'postgres': PostgresDBBackend,
79 'postgres': PostgresDBBackend,
80 'mysql': MySQLDBBackend,
80 'mysql': MySQLDBBackend,
81 '': EmptyDBBackend
81 '': EmptyDBBackend
82 }[backend_type]
82 }[backend_type]
83
83
84
84
85 class DBBackend(object):
85 class DBBackend(object):
86 _store = os.path.dirname(os.path.abspath(__file__))
86 _store = os.path.dirname(os.path.abspath(__file__))
87 _type = None
87 _type = None
88 _base_ini_config = [{'app:main': {'vcs.start_server': 'false',
88 _base_ini_config = [{'app:main': {'vcs.start_server': 'false',
89 'startup.import_repos': 'false',
89 'startup.import_repos': 'false',
90 'is_test': 'False'}}]
90 'is_test': 'False'}}]
91 _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
91 _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
92 _base_db_name = 'rhodecode_test_db_backend'
92 _base_db_name = 'rhodecode_test_db_backend'
93
93
94 def __init__(
94 def __init__(
95 self, config_file, db_name=None, basetemp=None,
95 self, config_file, db_name=None, basetemp=None,
96 connection_string=None):
96 connection_string=None):
97
97
98 from rhodecode.lib.vcs.backends.hg import largefiles_store
98 from rhodecode.lib.vcs.backends.hg import largefiles_store
99 from rhodecode.lib.vcs.backends.git import lfs_store
99 from rhodecode.lib.vcs.backends.git import lfs_store
100
100
101 self.fixture_store = os.path.join(self._store, self._type)
101 self.fixture_store = os.path.join(self._store, self._type)
102 self.db_name = db_name or self._base_db_name
102 self.db_name = db_name or self._base_db_name
103 self._base_ini_file = config_file
103 self._base_ini_file = config_file
104 self.stderr = ''
104 self.stderr = ''
105 self.stdout = ''
105 self.stdout = ''
106 self._basetemp = basetemp or tempfile.gettempdir()
106 self._basetemp = basetemp or tempfile.gettempdir()
107 self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
107 self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
108 self._repos_hg_largefiles_store = largefiles_store(self._basetemp)
108 self._repos_hg_largefiles_store = largefiles_store(self._basetemp)
109 self._repos_git_lfs_store = lfs_store(self._basetemp)
109 self._repos_git_lfs_store = lfs_store(self._basetemp)
110 self.connection_string = connection_string
110 self.connection_string = connection_string
111
111
112 @property
112 @property
113 def connection_string(self):
113 def connection_string(self):
114 return self._connection_string
114 return self._connection_string
115
115
116 @connection_string.setter
116 @connection_string.setter
117 def connection_string(self, new_connection_string):
117 def connection_string(self, new_connection_string):
118 if not new_connection_string:
118 if not new_connection_string:
119 new_connection_string = self.get_default_connection_string()
119 new_connection_string = self.get_default_connection_string()
120 else:
120 else:
121 new_connection_string = new_connection_string.format(
121 new_connection_string = new_connection_string.format(
122 db_name=self.db_name)
122 db_name=self.db_name)
123 url_parts = url.make_url(new_connection_string)
123 url_parts = url.make_url(new_connection_string)
124 self._connection_string = new_connection_string
124 self._connection_string = new_connection_string
125 self.user = url_parts.username
125 self.user = url_parts.username
126 self.password = url_parts.password
126 self.password = url_parts.password
127 self.host = url_parts.host
127 self.host = url_parts.host
128
128
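The setter above leans on SQLAlchemy's URL parsing; a minimal sketch of what it extracts from a formatted connection string (the values are examples only):

from sqlalchemy.engine import url

parts = url.make_url('postgresql://postgres:qweqwe@localhost/rhodecode_test_db_backend')
# user, password and host are taken from the parsed URL; the db name comes from the path.
assert parts.username == 'postgres'
assert parts.password == 'qweqwe'
assert parts.host == 'localhost'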
129 def get_default_connection_string(self):
129 def get_default_connection_string(self):
130 raise NotImplementedError('default connection_string is required.')
130 raise NotImplementedError('default connection_string is required.')
131
131
132 def execute(self, cmd, env=None, *args):
132 def execute(self, cmd, env=None, *args):
133 """
133 """
134 Runs command on the system with given ``args``.
134 Runs command on the system with given ``args``.
135 """
135 """
136
136
137 command = cmd + ' ' + ' '.join(args)
137 command = cmd + ' ' + ' '.join(args)
138 sys.stdout.write(command)
138 sys.stdout.write(command)
139
139
140 # Tell Python to use UTF-8 encoding for stdout
140 # Tell Python to use UTF-8 encoding for stdout
141 _env = os.environ.copy()
141 _env = os.environ.copy()
142 _env['PYTHONIOENCODING'] = 'UTF-8'
142 _env['PYTHONIOENCODING'] = 'UTF-8'
143 if env:
143 if env:
144 _env.update(env)
144 _env.update(env)
145 self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
145 self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
146 self.stdout, self.stderr = self.p.communicate()
146 self.stdout, self.stderr = self.p.communicate()
147 sys.stdout.write('COMMAND:'+command+'\n')
147 sys.stdout.write('COMMAND:'+command+'\n')
148 sys.stdout.write(self.stdout)
148 sys.stdout.write(self.stdout)
149 return self.stdout, self.stderr
149 return self.stdout, self.stderr
150
150
151 def assert_returncode_success(self):
151 def assert_returncode_success(self):
152 if not self.p.returncode == 0:
152 if not self.p.returncode == 0:
153 print(self.stderr)
153 print(self.stderr)
154 raise AssertionError('non 0 retcode:{}'.format(self.p.returncode))
154 raise AssertionError('non 0 retcode:{}'.format(self.p.returncode))
155
155
156 def assert_correct_output(self, stdout, version):
156 def assert_correct_output(self, stdout, version):
157 assert 'UPGRADE FOR STEP {} COMPLETED'.format(version) in stdout
157 assert 'UPGRADE FOR STEP {} COMPLETED'.format(version) in stdout
158
158
159 def setup_rhodecode_db(self, ini_params=None, env=None):
159 def setup_rhodecode_db(self, ini_params=None, env=None):
160 if not ini_params:
160 if not ini_params:
161 ini_params = self._base_ini_config
161 ini_params = self._base_ini_config
162
162
163 ini_params.extend(self._db_url)
163 ini_params.extend(self._db_url)
164 with TestINI(self._base_ini_file, ini_params,
164 with TestINI(self._base_ini_file, ini_params,
165 self._type, destroy=True) as _ini_file:
165 self._type, destroy=True) as _ini_file:
166
166
167 if not os.path.isdir(self._repos_location):
167 if not os.path.isdir(self._repos_location):
168 os.makedirs(self._repos_location)
168 os.makedirs(self._repos_location)
169 if not os.path.isdir(self._repos_hg_largefiles_store):
169 if not os.path.isdir(self._repos_hg_largefiles_store):
170 os.makedirs(self._repos_hg_largefiles_store)
170 os.makedirs(self._repos_hg_largefiles_store)
171 if not os.path.isdir(self._repos_git_lfs_store):
171 if not os.path.isdir(self._repos_git_lfs_store):
172 os.makedirs(self._repos_git_lfs_store)
172 os.makedirs(self._repos_git_lfs_store)
173
173
174 return self.execute(
174 return self.execute(
175 "rc-setup-app {0} --user=marcink "
175 "rc-setup-app {0} --user=marcink "
176 "--email=marcin@rhodeocode.com --password={1} "
176 "--email=marcin@rhodeocode.com --password={1} "
177 "--repos={2} --force-yes".format(
177 "--repos={2} --force-yes".format(
178 _ini_file, 'qweqwe', self._repos_location), env=env)
178 _ini_file, 'qweqwe', self._repos_location), env=env)
179
179
180 def upgrade_database(self, ini_params=None):
180 def upgrade_database(self, ini_params=None):
181 if not ini_params:
181 if not ini_params:
182 ini_params = self._base_ini_config
182 ini_params = self._base_ini_config
183 ini_params.extend(self._db_url)
183 ini_params.extend(self._db_url)
184
184
185 test_ini = TestINI(
185 test_ini = TestINI(
186 self._base_ini_file, ini_params, self._type, destroy=True)
186 self._base_ini_file, ini_params, self._type, destroy=True)
187 with test_ini as ini_file:
187 with test_ini as ini_file:
188 if not os.path.isdir(self._repos_location):
188 if not os.path.isdir(self._repos_location):
189 os.makedirs(self._repos_location)
189 os.makedirs(self._repos_location)
190
190
191 return self.execute(
191 return self.execute(
192 "rc-upgrade-db {0} --force-yes".format(ini_file))
192 "rc-upgrade-db {0} --force-yes".format(ini_file))
193
193
194 def setup_db(self):
194 def setup_db(self):
195 raise NotImplementedError
195 raise NotImplementedError
196
196
197 def teardown_db(self):
197 def teardown_db(self):
198 raise NotImplementedError
198 raise NotImplementedError
199
199
200 def import_dump(self, dumpname):
200 def import_dump(self, dumpname):
201 raise NotImplementedError
201 raise NotImplementedError
202
202
203
203
204 class EmptyDBBackend(DBBackend):
204 class EmptyDBBackend(DBBackend):
205 _type = ''
205 _type = ''
206
206
207 def setup_db(self):
207 def setup_db(self):
208 pass
208 pass
209
209
210 def teardown_db(self):
210 def teardown_db(self):
211 pass
211 pass
212
212
213 def import_dump(self, dumpname):
213 def import_dump(self, dumpname):
214 pass
214 pass
215
215
216 def assert_returncode_success(self):
216 def assert_returncode_success(self):
217 assert True
217 assert True
218
218
219
219
220 class SQLiteDBBackend(DBBackend):
220 class SQLiteDBBackend(DBBackend):
221 _type = 'sqlite'
221 _type = 'sqlite'
222
222
223 def get_default_connection_string(self):
223 def get_default_connection_string(self):
224 return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)
224 return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)
225
225
226 def setup_db(self):
226 def setup_db(self):
227 # dump schema for tests
227 # dump schema for tests
228 # cp -v $TEST_DB_NAME
228 # cp -v $TEST_DB_NAME
229 self._db_url = [{'app:main': {
229 self._db_url = [{'app:main': {
230 'sqlalchemy.db1.url': self.connection_string}}]
230 'sqlalchemy.db1.url': self.connection_string}}]
231
231
232 def import_dump(self, dumpname):
232 def import_dump(self, dumpname):
233 dump = os.path.join(self.fixture_store, dumpname)
233 dump = os.path.join(self.fixture_store, dumpname)
234 target = os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self))
234 target = os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self))
235 return self.execute('cp -v {} {}'.format(dump, target))
235 return self.execute('cp -v {} {}'.format(dump, target))
236
236
237 def teardown_db(self):
237 def teardown_db(self):
238 return self.execute("rm -rf {}.sqlite".format(
238 return self.execute("rm -rf {}.sqlite".format(
239 os.path.join(self._basetemp, self.db_name)))
239 os.path.join(self._basetemp, self.db_name)))
240
240
241
241
242 class MySQLDBBackend(DBBackend):
242 class MySQLDBBackend(DBBackend):
243 _type = 'mysql'
243 _type = 'mysql'
244
244
245 def get_default_connection_string(self):
245 def get_default_connection_string(self):
246 return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)
246 return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)
247
247
248 def setup_db(self):
248 def setup_db(self):
249 # dump schema for tests
249 # dump schema for tests
250 # mysqldump -uroot -pqweqwe $TEST_DB_NAME
250 # mysqldump -uroot -pqweqwe $TEST_DB_NAME
251 self._db_url = [{'app:main': {
251 self._db_url = [{'app:main': {
252 'sqlalchemy.db1.url': self.connection_string}}]
252 'sqlalchemy.db1.url': self.connection_string}}]
253 return self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
253 return self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
254 self.user, self.password, self.db_name))
254 self.user, self.password, self.db_name))
255
255
256 def import_dump(self, dumpname):
256 def import_dump(self, dumpname):
257 dump = os.path.join(self.fixture_store, dumpname)
257 dump = os.path.join(self.fixture_store, dumpname)
258 return self.execute("mysql -u{} -p{} {} < {}".format(
258 return self.execute("mysql -u{} -p{} {} < {}".format(
259 self.user, self.password, self.db_name, dump))
259 self.user, self.password, self.db_name, dump))
260
260
261 def teardown_db(self):
261 def teardown_db(self):
262 return self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
262 return self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
263 self.user, self.password, self.db_name))
263 self.user, self.password, self.db_name))
264
264
265
265
266 class PostgresDBBackend(DBBackend):
266 class PostgresDBBackend(DBBackend):
267 _type = 'postgres'
267 _type = 'postgres'
268
268
269 def get_default_connection_string(self):
269 def get_default_connection_string(self):
270 return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)
270 return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)
271
271
272 def setup_db(self):
272 def setup_db(self):
273 # dump schema for tests
273 # dump schema for tests
274 # pg_dump -U postgres -h localhost $TEST_DB_NAME
274 # pg_dump -U postgres -h localhost $TEST_DB_NAME
275 self._db_url = [{'app:main': {
275 self._db_url = [{'app:main': {
276 'sqlalchemy.db1.url':
276 'sqlalchemy.db1.url':
277 self.connection_string}}]
277 self.connection_string}}]
278 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
278 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
279 "-c 'create database '{}';'".format(
279 "-c 'create database '{}';'".format(
280 self.password, self.user, self.db_name))
280 self.password, self.user, self.db_name))
281
281
282 def teardown_db(self):
282 def teardown_db(self):
283 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
283 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
284 "-c 'drop database if exists '{}';'".format(
284 "-c 'drop database if exists '{}';'".format(
285 self.password, self.user, self.db_name))
285 self.password, self.user, self.db_name))
286
286
287 def import_dump(self, dumpname):
287 def import_dump(self, dumpname):
288 dump = os.path.join(self.fixture_store, dumpname)
288 dump = os.path.join(self.fixture_store, dumpname)
289 return self.execute(
289 return self.execute(
290 "PGPASSWORD={} psql -U {} -h localhost -d {} -1 "
290 "PGPASSWORD={} psql -U {} -h localhost -d {} -1 "
291 "-f {}".format(
291 "-f {}".format(
292 self.password, self.user, self.db_name, dump))
292 self.password, self.user, self.db_name, dump))
@@ -1,1867 +1,1886 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33 import functools
33 import functools
34
34
35 import mock
35 import mock
36 import pyramid.testing
36 import pyramid.testing
37 import pytest
37 import pytest
38 import colander
38 import colander
39 import requests
39 import requests
40 import pyramid.paster
40 import pyramid.paster
41
41
42 import rhodecode
42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.db import (
46 from rhodecode.model.db import (
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.model.integration import IntegrationModel
57 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations import integration_type_registry
58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.vcs import create_vcsserver_proxy
60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.lib.vcs.nodes import FileNode
63 from rhodecode.tests import (
63 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 TEST_USER_REGULAR_PASS)
66 TEST_USER_REGULAR_PASS)
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 from rhodecode.tests.fixture import Fixture
68 from rhodecode.tests.fixture import Fixture
69 from rhodecode.config import utils as config_utils
69 from rhodecode.config import utils as config_utils
70
70
71 def _split_comma(value):
71 def _split_comma(value):
72 return value.split(',')
72 return value.split(',')
73
73
74
74
75 def pytest_addoption(parser):
75 def pytest_addoption(parser):
76 parser.addoption(
76 parser.addoption(
77 '--keep-tmp-path', action='store_true',
77 '--keep-tmp-path', action='store_true',
78 help="Keep the test temporary directories")
78 help="Keep the test temporary directories")
79 parser.addoption(
79 parser.addoption(
80 '--backends', action='store', type=_split_comma,
80 '--backends', action='store', type=_split_comma,
81 default=['git', 'hg', 'svn'],
81 default=['git', 'hg', 'svn'],
82 help="Select which backends to test for backend specific tests.")
82 help="Select which backends to test for backend specific tests.")
83 parser.addoption(
83 parser.addoption(
84 '--dbs', action='store', type=_split_comma,
84 '--dbs', action='store', type=_split_comma,
85 default=['sqlite'],
85 default=['sqlite'],
86 help="Select which database to test for database specific tests. "
86 help="Select which database to test for database specific tests. "
87 "Possible options are sqlite,postgres,mysql")
87 "Possible options are sqlite,postgres,mysql")
88 parser.addoption(
88 parser.addoption(
89 '--appenlight', '--ae', action='store_true',
89 '--appenlight', '--ae', action='store_true',
90 help="Track statistics in appenlight.")
90 help="Track statistics in appenlight.")
91 parser.addoption(
91 parser.addoption(
92 '--appenlight-api-key', '--ae-key',
92 '--appenlight-api-key', '--ae-key',
93 help="API key for Appenlight.")
93 help="API key for Appenlight.")
94 parser.addoption(
94 parser.addoption(
95 '--appenlight-url', '--ae-url',
95 '--appenlight-url', '--ae-url',
96 default="https://ae.rhodecode.com",
96 default="https://ae.rhodecode.com",
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 parser.addoption(
98 parser.addoption(
99 '--sqlite-connection-string', action='store',
99 '--sqlite-connection-string', action='store',
100 default='', help="Connection string for the dbs tests with SQLite")
100 default='', help="Connection string for the dbs tests with SQLite")
101 parser.addoption(
101 parser.addoption(
102 '--postgres-connection-string', action='store',
102 '--postgres-connection-string', action='store',
103 default='', help="Connection string for the dbs tests with Postgres")
103 default='', help="Connection string for the dbs tests with Postgres")
104 parser.addoption(
104 parser.addoption(
105 '--mysql-connection-string', action='store',
105 '--mysql-connection-string', action='store',
106 default='', help="Connection string for the dbs tests with MySQL")
106 default='', help="Connection string for the dbs tests with MySQL")
107 parser.addoption(
107 parser.addoption(
108 '--repeat', type=int, default=100,
108 '--repeat', type=int, default=100,
109 help="Number of repetitions in performance tests.")
109 help="Number of repetitions in performance tests.")
110
110
111
111
112 def pytest_configure(config):
112 def pytest_configure(config):
113 from rhodecode.config import patches
113 from rhodecode.config import patches
114
114
115
115
116 def pytest_collection_modifyitems(session, config, items):
116 def pytest_collection_modifyitems(session, config, items):
117 # drop items marked as non-tests (nose's `nottest`); used during the nose to pytest transition
117 # drop items marked as non-tests (nose's `nottest`); used during the nose to pytest transition
118 remaining = [
118 remaining = [
119 i for i in items if getattr(i.obj, '__test__', True)]
119 i for i in items if getattr(i.obj, '__test__', True)]
120 items[:] = remaining
120 items[:] = remaining
121
121
122
122
123 def pytest_generate_tests(metafunc):
123 def pytest_generate_tests(metafunc):
124 # Support test generation based on --backend parameter
124 # Support test generation based on --backend parameter
125 if 'backend_alias' in metafunc.fixturenames:
125 if 'backend_alias' in metafunc.fixturenames:
126 backends = get_backends_from_metafunc(metafunc)
126 backends = get_backends_from_metafunc(metafunc)
127 scope = None
127 scope = None
128 if not backends:
128 if not backends:
129 pytest.skip("Not enabled for any of selected backends")
129 pytest.skip("Not enabled for any of selected backends")
130 metafunc.parametrize('backend_alias', backends, scope=scope)
130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 elif hasattr(metafunc.function, 'backends'):
131 elif hasattr(metafunc.function, 'backends'):
132 backends = get_backends_from_metafunc(metafunc)
132 backends = get_backends_from_metafunc(metafunc)
133 if not backends:
133 if not backends:
134 pytest.skip("Not enabled for any of selected backends")
134 pytest.skip("Not enabled for any of selected backends")
135
135
136
136
137 def get_backends_from_metafunc(metafunc):
137 def get_backends_from_metafunc(metafunc):
138 requested_backends = set(metafunc.config.getoption('--backends'))
138 requested_backends = set(metafunc.config.getoption('--backends'))
139 if hasattr(metafunc.function, 'backends'):
139 if hasattr(metafunc.function, 'backends'):
140 # Supported backends by this test function, created from
140 # Supported backends by this test function, created from
141 # pytest.mark.backends
141 # pytest.mark.backends
142 backends = metafunc.function.backends.args
142 backends = metafunc.definition.get_closest_marker('backends').args
143 elif hasattr(metafunc.cls, 'backend_alias'):
143 elif hasattr(metafunc.cls, 'backend_alias'):
144 # Support class attribute "backend_alias", this is mainly
144 # Support class attribute "backend_alias", this is mainly
145 # for legacy reasons for tests not yet using pytest.mark.backends
145 # for legacy reasons for tests not yet using pytest.mark.backends
146 backends = [metafunc.cls.backend_alias]
146 backends = [metafunc.cls.backend_alias]
147 else:
147 else:
148 backends = metafunc.config.getoption('--backends')
148 backends = metafunc.config.getoption('--backends')
149 return requested_backends.intersection(backends)
149 return requested_backends.intersection(backends)
150
150
151
151
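For reference, a sketch of the `pytest.mark.backends` usage that `get_backends_from_metafunc` reads back; the test body is illustrative:

@pytest.mark.backends('git', 'hg')
def test_commit_listing(backend):
    # Parametrized once per backend named in the marker, intersected with --backends.
    repo = backend.create_repo(number_of_commits=1)
    assert repo.scm_instance().commit_ids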
152 @pytest.fixture(scope='session', autouse=True)
152 @pytest.fixture(scope='session', autouse=True)
153 def activate_example_rcextensions(request):
153 def activate_example_rcextensions(request):
154 """
154 """
155 Patch in an example rcextensions module which verifies passed in kwargs.
155 Patch in an example rcextensions module which verifies passed in kwargs.
156 """
156 """
157 from rhodecode.tests.other import example_rcextensions
157 from rhodecode.tests.other import example_rcextensions
158
158
159 old_extensions = rhodecode.EXTENSIONS
159 old_extensions = rhodecode.EXTENSIONS
160 rhodecode.EXTENSIONS = example_rcextensions
160 rhodecode.EXTENSIONS = example_rcextensions
161
161
162 @request.addfinalizer
162 @request.addfinalizer
163 def cleanup():
163 def cleanup():
164 rhodecode.EXTENSIONS = old_extensions
164 rhodecode.EXTENSIONS = old_extensions
165
165
166
166
167 @pytest.fixture
167 @pytest.fixture
168 def capture_rcextensions():
168 def capture_rcextensions():
169 """
169 """
170 Returns the recorded calls to entry points in rcextensions.
170 Returns the recorded calls to entry points in rcextensions.
171 """
171 """
172 calls = rhodecode.EXTENSIONS.calls
172 calls = rhodecode.EXTENSIONS.calls
173 calls.clear()
173 calls.clear()
174 # Note: At this moment, it is still the empty dict, but that will
174 # Note: At this moment, it is still the empty dict, but that will
175 # be filled during the test run and since it is a reference this
175 # be filled during the test run and since it is a reference this
176 # is enough to make it work.
176 # is enough to make it work.
177 return calls
177 return calls
178
178
179
179
180 @pytest.fixture(scope='session')
180 @pytest.fixture(scope='session')
181 def http_environ_session():
181 def http_environ_session():
182 """
182 """
183 Allows using "http_environ" in session scope.
183 Allows using "http_environ" in session scope.
184 """
184 """
185 return http_environ(
185 return plain_http_environ()
186 http_host_stub=http_host_stub())
186
187
188 def plain_http_host_stub():
189 """
190 Value of HTTP_HOST in the test run.
191 """
192 return 'example.com:80'
187
193
188
194
189 @pytest.fixture
195 @pytest.fixture
190 def http_host_stub():
196 def http_host_stub():
191 """
197 """
192 Value of HTTP_HOST in the test run.
198 Value of HTTP_HOST in the test run.
193 """
199 """
194 return 'example.com:80'
200 return plain_http_host_stub()
201
202
203 def plain_http_host_only_stub():
204 """
205 Host-only part of HTTP_HOST (no port) in the test run.
206 """
207 return plain_http_host_stub().split(':')[0]
195
208
196
209
197 @pytest.fixture
210 @pytest.fixture
198 def http_host_only_stub():
211 def http_host_only_stub():
199 """
212 """
200 Host-only part of HTTP_HOST (no port) in the test run.
213 Host-only part of HTTP_HOST (no port) in the test run.
201 """
214 """
202 return http_host_stub().split(':')[0]
215 return plain_http_host_only_stub()
203
216
204
217
205 @pytest.fixture
218 def plain_http_environ():
206 def http_environ(http_host_stub):
207 """
219 """
208 HTTP extra environ keys.
220 HTTP extra environ keys.
209
221
210 Used by the test application as well as for setting up the pylons
222 Used by the test application as well as for setting up the pylons
211 environment. In the case of the fixture "app" it should be possible
223 environment. In the case of the fixture "app" it should be possible
212 to override this for a specific test case.
224 to override this for a specific test case.
213 """
225 """
214 return {
226 return {
215 'SERVER_NAME': http_host_only_stub(),
227 'SERVER_NAME': plain_http_host_only_stub(),
216 'SERVER_PORT': http_host_stub.split(':')[1],
228 'SERVER_PORT': plain_http_host_stub().split(':')[1],
217 'HTTP_HOST': http_host_stub,
229 'HTTP_HOST': plain_http_host_stub(),
218 'HTTP_USER_AGENT': 'rc-test-agent',
230 'HTTP_USER_AGENT': 'rc-test-agent',
219 'REQUEST_METHOD': 'GET'
231 'REQUEST_METHOD': 'GET'
220 }
232 }
221
233
222
234
235 @pytest.fixture
236 def http_environ():
237 """
238 HTTP extra environ keys.
239
240 Used by the test application as well as for setting up the pylons
241 environment. In the case of the fixture "app" it should be possible
242 to override this for a specific test case.
243 """
244 return plain_http_environ()
245
246
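A hedged sketch of how a single test module could shadow the `http_environ` fixture to simulate a different host; the host names are placeholders:

@pytest.fixture
def http_environ():
    # Reuse the plain helper and only override the host-related keys.
    environ = plain_http_environ()
    environ['HTTP_HOST'] = 'other.example.com:80'
    environ['SERVER_NAME'] = 'other.example.com'
    return environ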
223 @pytest.fixture(scope='session')
247 @pytest.fixture(scope='session')
224 def baseapp(ini_config, vcsserver, http_environ_session):
248 def baseapp(ini_config, vcsserver, http_environ_session):
225 from rhodecode.lib.pyramid_utils import get_app_config
249 from rhodecode.lib.pyramid_utils import get_app_config
226 from rhodecode.config.middleware import make_pyramid_app
250 from rhodecode.config.middleware import make_pyramid_app
227
251
228 print("Using the RhodeCode configuration:{}".format(ini_config))
252 print("Using the RhodeCode configuration:{}".format(ini_config))
229 pyramid.paster.setup_logging(ini_config)
253 pyramid.paster.setup_logging(ini_config)
230
254
231 settings = get_app_config(ini_config)
255 settings = get_app_config(ini_config)
232 app = make_pyramid_app({'__file__': ini_config}, **settings)
256 app = make_pyramid_app({'__file__': ini_config}, **settings)
233
257
234 return app
258 return app
235
259
236
260
237 @pytest.fixture(scope='function')
261 @pytest.fixture(scope='function')
238 def app(request, config_stub, baseapp, http_environ):
262 def app(request, config_stub, baseapp, http_environ):
239 app = CustomTestApp(
263 app = CustomTestApp(
240 baseapp,
264 baseapp,
241 extra_environ=http_environ)
265 extra_environ=http_environ)
242 if request.cls:
266 if request.cls:
243 request.cls.app = app
267 request.cls.app = app
244 return app
268 return app
245
269
246
270
247 @pytest.fixture(scope='session')
271 @pytest.fixture(scope='session')
248 def app_settings(baseapp, ini_config):
272 def app_settings(baseapp, ini_config):
249 """
273 """
250 Settings dictionary used to create the app.
274 Settings dictionary used to create the app.
251
275
252 Parses the ini file and passes the result through the sanitize and apply
276 Parses the ini file and passes the result through the sanitize and apply
253 defaults mechanism in `rhodecode.config.middleware`.
277 defaults mechanism in `rhodecode.config.middleware`.
254 """
278 """
255 return baseapp.config.get_settings()
279 return baseapp.config.get_settings()
256
280
257
281
258 @pytest.fixture(scope='session')
282 @pytest.fixture(scope='session')
259 def db_connection(ini_settings):
283 def db_connection(ini_settings):
260 # Initialize the database connection.
284 # Initialize the database connection.
261 config_utils.initialize_database(ini_settings)
285 config_utils.initialize_database(ini_settings)
262
286
263
287
264 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
288 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
265
289
266
290
267 def _autologin_user(app, *args):
291 def _autologin_user(app, *args):
268 session = login_user_session(app, *args)
292 session = login_user_session(app, *args)
269 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
293 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
270 return LoginData(csrf_token, session['rhodecode_user'])
294 return LoginData(csrf_token, session['rhodecode_user'])
271
295
272
296
273 @pytest.fixture
297 @pytest.fixture
274 def autologin_user(app):
298 def autologin_user(app):
275 """
299 """
276 Utility fixture which makes sure that the admin user is logged in
300 Utility fixture which makes sure that the admin user is logged in
277 """
301 """
278 return _autologin_user(app)
302 return _autologin_user(app)
279
303
280
304
281 @pytest.fixture
305 @pytest.fixture
282 def autologin_regular_user(app):
306 def autologin_regular_user(app):
283 """
307 """
284 Utility fixture which makes sure that the regular user is logged in
308 Utility fixture which makes sure that the regular user is logged in
285 """
309 """
286 return _autologin_user(
310 return _autologin_user(
287 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
311 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
288
312
289
313
290 @pytest.fixture(scope='function')
314 @pytest.fixture(scope='function')
291 def csrf_token(request, autologin_user):
315 def csrf_token(request, autologin_user):
292 return autologin_user.csrf_token
316 return autologin_user.csrf_token
293
317
294
318
295 @pytest.fixture(scope='function')
319 @pytest.fixture(scope='function')
296 def xhr_header(request):
320 def xhr_header(request):
297 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
321 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
298
322
299
323
300 @pytest.fixture
324 @pytest.fixture
301 def real_crypto_backend(monkeypatch):
325 def real_crypto_backend(monkeypatch):
302 """
326 """
303 Switch the production crypto backend on for this test.
327 Switch the production crypto backend on for this test.
304
328
305 During the test run the crypto backend is replaced with a faster
329 During the test run the crypto backend is replaced with a faster
306 implementation based on the MD5 algorithm.
330 implementation based on the MD5 algorithm.
307 """
331 """
308 monkeypatch.setattr(rhodecode, 'is_test', False)
332 monkeypatch.setattr(rhodecode, 'is_test', False)
309
333
310
334
311 @pytest.fixture(scope='class')
335 @pytest.fixture(scope='class')
312 def index_location(request, baseapp):
336 def index_location(request, baseapp):
313 index_location = baseapp.config.get_settings()['search.location']
337 index_location = baseapp.config.get_settings()['search.location']
314 if request.cls:
338 if request.cls:
315 request.cls.index_location = index_location
339 request.cls.index_location = index_location
316 return index_location
340 return index_location
317
341
318
342
319 @pytest.fixture(scope='session', autouse=True)
343 @pytest.fixture(scope='session', autouse=True)
320 def tests_tmp_path(request):
344 def tests_tmp_path(request):
321 """
345 """
322 Create temporary directory to be used during the test session.
346 Create temporary directory to be used during the test session.
323 """
347 """
324 if not os.path.exists(TESTS_TMP_PATH):
348 if not os.path.exists(TESTS_TMP_PATH):
325 os.makedirs(TESTS_TMP_PATH)
349 os.makedirs(TESTS_TMP_PATH)
326
350
327 if not request.config.getoption('--keep-tmp-path'):
351 if not request.config.getoption('--keep-tmp-path'):
328 @request.addfinalizer
352 @request.addfinalizer
329 def remove_tmp_path():
353 def remove_tmp_path():
330 shutil.rmtree(TESTS_TMP_PATH)
354 shutil.rmtree(TESTS_TMP_PATH)
331
355
332 return TESTS_TMP_PATH
356 return TESTS_TMP_PATH
333
357
334
358
335 @pytest.fixture
359 @pytest.fixture
336 def test_repo_group(request):
360 def test_repo_group(request):
337 """
361 """
338 Create a temporary repository group, and destroy it after
362 Create a temporary repository group, and destroy it after
339 usage automatically
363 usage automatically
340 """
364 """
341 fixture = Fixture()
365 fixture = Fixture()
342 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
366 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
343 repo_group = fixture.create_repo_group(repogroupid)
367 repo_group = fixture.create_repo_group(repogroupid)
344
368
345 def _cleanup():
369 def _cleanup():
346 fixture.destroy_repo_group(repogroupid)
370 fixture.destroy_repo_group(repogroupid)
347
371
348 request.addfinalizer(_cleanup)
372 request.addfinalizer(_cleanup)
349 return repo_group
373 return repo_group
350
374
351
375
352 @pytest.fixture
376 @pytest.fixture
353 def test_user_group(request):
377 def test_user_group(request):
354 """
378 """
355 Create a temporary user group, and destroy it after
379 Create a temporary user group, and destroy it after
356 usage automatically
380 usage automatically
357 """
381 """
358 fixture = Fixture()
382 fixture = Fixture()
359 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
383 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
360 user_group = fixture.create_user_group(usergroupid)
384 user_group = fixture.create_user_group(usergroupid)
361
385
362 def _cleanup():
386 def _cleanup():
363 fixture.destroy_user_group(user_group)
387 fixture.destroy_user_group(user_group)
364
388
365 request.addfinalizer(_cleanup)
389 request.addfinalizer(_cleanup)
366 return user_group
390 return user_group
367
391
368
392
369 @pytest.fixture(scope='session')
393 @pytest.fixture(scope='session')
370 def test_repo(request):
394 def test_repo(request):
371 container = TestRepoContainer()
395 container = TestRepoContainer()
372 request.addfinalizer(container._cleanup)
396 request.addfinalizer(container._cleanup)
373 return container
397 return container
374
398
375
399
376 class TestRepoContainer(object):
400 class TestRepoContainer(object):
377 """
401 """
378 Container for test repositories which are used read only.
402 Container for test repositories which are used read only.
379
403
380 Repositories will be created on demand and re-used during the lifetime
404 Repositories will be created on demand and re-used during the lifetime
381 of this object.
405 of this object.
382
406
383 Usage to get the svn test repository "minimal"::
407 Usage to get the svn test repository "minimal"::
384
408
385 test_repo = TestRepoContainer()
409 test_repo = TestRepoContainer()
386 repo = test_repo('minimal', 'svn')
410 repo = test_repo('minimal', 'svn')
387
411
388 """
412 """
389
413
390 dump_extractors = {
414 dump_extractors = {
391 'git': utils.extract_git_repo_from_dump,
415 'git': utils.extract_git_repo_from_dump,
392 'hg': utils.extract_hg_repo_from_dump,
416 'hg': utils.extract_hg_repo_from_dump,
393 'svn': utils.extract_svn_repo_from_dump,
417 'svn': utils.extract_svn_repo_from_dump,
394 }
418 }
395
419
396 def __init__(self):
420 def __init__(self):
397 self._cleanup_repos = []
421 self._cleanup_repos = []
398 self._fixture = Fixture()
422 self._fixture = Fixture()
399 self._repos = {}
423 self._repos = {}
400
424
401 def __call__(self, dump_name, backend_alias, config=None):
425 def __call__(self, dump_name, backend_alias, config=None):
402 key = (dump_name, backend_alias)
426 key = (dump_name, backend_alias)
403 if key not in self._repos:
427 if key not in self._repos:
404 repo = self._create_repo(dump_name, backend_alias, config)
428 repo = self._create_repo(dump_name, backend_alias, config)
405 self._repos[key] = repo.repo_id
429 self._repos[key] = repo.repo_id
406 return Repository.get(self._repos[key])
430 return Repository.get(self._repos[key])
407
431
408 def _create_repo(self, dump_name, backend_alias, config):
432 def _create_repo(self, dump_name, backend_alias, config):
409 repo_name = '%s-%s' % (backend_alias, dump_name)
433 repo_name = '%s-%s' % (backend_alias, dump_name)
410 backend_class = get_backend(backend_alias)
434 backend_class = get_backend(backend_alias)
411 dump_extractor = self.dump_extractors[backend_alias]
435 dump_extractor = self.dump_extractors[backend_alias]
412 repo_path = dump_extractor(dump_name, repo_name)
436 repo_path = dump_extractor(dump_name, repo_name)
413
437
414 vcs_repo = backend_class(repo_path, config=config)
438 vcs_repo = backend_class(repo_path, config=config)
415 repo2db_mapper({repo_name: vcs_repo})
439 repo2db_mapper({repo_name: vcs_repo})
416
440
417 repo = RepoModel().get_by_repo_name(repo_name)
441 repo = RepoModel().get_by_repo_name(repo_name)
418 self._cleanup_repos.append(repo_name)
442 self._cleanup_repos.append(repo_name)
419 return repo
443 return repo
420
444
421 def _cleanup(self):
445 def _cleanup(self):
422 for repo_name in reversed(self._cleanup_repos):
446 for repo_name in reversed(self._cleanup_repos):
423 self._fixture.destroy_repo(repo_name)
447 self._fixture.destroy_repo(repo_name)
424
448
425
449
426 @pytest.fixture
450 def backend_base(request, backend_alias, baseapp, test_repo):
427 def backend(request, backend_alias, baseapp, test_repo):
428 """
429 Parametrized fixture which represents a single backend implementation.
430
431 It respects the option `--backends` to focus the test run on specific
432 backend implementations.
433
434 It also supports `pytest.mark.xfail_backends` to mark tests as failing
435 for specific backends. This is intended as a utility for incremental
436 development of a new backend implementation.
437 """
438 if backend_alias not in request.config.getoption('--backends'):
451 if backend_alias not in request.config.getoption('--backends'):
439 pytest.skip("Backend %s not selected." % (backend_alias, ))
452 pytest.skip("Backend %s not selected." % (backend_alias, ))
440
453
441 utils.check_xfail_backends(request.node, backend_alias)
454 utils.check_xfail_backends(request.node, backend_alias)
442 utils.check_skip_backends(request.node, backend_alias)
455 utils.check_skip_backends(request.node, backend_alias)
443
456
444 repo_name = 'vcs_test_%s' % (backend_alias, )
457 repo_name = 'vcs_test_%s' % (backend_alias, )
445 backend = Backend(
458 backend = Backend(
446 alias=backend_alias,
459 alias=backend_alias,
447 repo_name=repo_name,
460 repo_name=repo_name,
448 test_name=request.node.name,
461 test_name=request.node.name,
449 test_repo_container=test_repo)
462 test_repo_container=test_repo)
450 request.addfinalizer(backend.cleanup)
463 request.addfinalizer(backend.cleanup)
451 return backend
464 return backend
452
465
453
466
454 @pytest.fixture
467 @pytest.fixture
468 def backend(request, backend_alias, baseapp, test_repo):
469 """
470 Parametrized fixture which represents a single backend implementation.
471
472 It respects the option `--backends` to focus the test run on specific
473 backend implementations.
474
475 It also supports `pytest.mark.xfail_backends` to mark tests as failing
476 for specific backends. This is intended as a utility for incremental
477 development of a new backend implementation.
478 """
479 return backend_base(request, backend_alias, baseapp, test_repo)
480
481
482 @pytest.fixture
455 def backend_git(request, baseapp, test_repo):
483 def backend_git(request, baseapp, test_repo):
456 return backend(request, 'git', baseapp, test_repo)
484 return backend_base(request, 'git', baseapp, test_repo)
457
485
458
486
459 @pytest.fixture
487 @pytest.fixture
460 def backend_hg(request, baseapp, test_repo):
488 def backend_hg(request, baseapp, test_repo):
461 return backend(request, 'hg', baseapp, test_repo)
489 return backend_base(request, 'hg', baseapp, test_repo)
462
490
463
491
464 @pytest.fixture
492 @pytest.fixture
465 def backend_svn(request, baseapp, test_repo):
493 def backend_svn(request, baseapp, test_repo):
466 return backend(request, 'svn', baseapp, test_repo)
494 return backend_base(request, 'svn', baseapp, test_repo)
467
495
468
496
469 @pytest.fixture
497 @pytest.fixture
470 def backend_random(backend_git):
498 def backend_random(backend_git):
471 """
499 """
472 Use this to express that your tests need "a backend".
500 Use this to express that your tests need "a backend".
473
501
474 A few of our tests need a backend, so that we can run the code. This
502 A few of our tests need a backend, so that we can run the code. This
475 fixture is intended to be used for such cases. It will pick one of the
503 fixture is intended to be used for such cases. It will pick one of the
476 backends and run the tests.
504 backends and run the tests.
477
505
478 The fixture `backend` would run the test multiple times for each
506 The fixture `backend` would run the test multiple times for each
479 available backend which is a pure waste of time if the test is
507 available backend which is a pure waste of time if the test is
480 independent of the backend type.
508 independent of the backend type.
481 """
509 """
482 # TODO: johbo: Change this to pick a random backend
510 # TODO: johbo: Change this to pick a random backend
483 return backend_git
511 return backend_git
484
512
485
513
486 @pytest.fixture
514 @pytest.fixture
487 def backend_stub(backend_git):
515 def backend_stub(backend_git):
488 """
516 """
489 Use this to express that your tests need a backend stub
517 Use this to express that your tests need a backend stub
490
518
491 TODO: mikhail: Implement a real stub logic instead of returning
519 TODO: mikhail: Implement a real stub logic instead of returning
492 a git backend
520 a git backend
493 """
521 """
494 return backend_git
522 return backend_git
495
523
496
524
497 @pytest.fixture
525 @pytest.fixture
498 def repo_stub(backend_stub):
526 def repo_stub(backend_stub):
499 """
527 """
500 Use this to express that your tests need a repository stub
528 Use this to express that your tests need a repository stub
501 """
529 """
502 return backend_stub.create_repo()
530 return backend_stub.create_repo()
503
531
504
532
505 class Backend(object):
533 class Backend(object):
506 """
534 """
507 Represents the test configuration for one supported backend
535 Represents the test configuration for one supported backend
508
536
509 Provides easy access to different test repositories based on
537 Provides easy access to different test repositories based on
510 `__getitem__`. Such repositories will only be created once per test
538 `__getitem__`. Such repositories will only be created once per test
511 session.
539 session.
512 """
540 """
513
541
514 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
542 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
515 _master_repo = None
543 _master_repo = None
516 _commit_ids = {}
544 _commit_ids = {}
517
545
518 def __init__(self, alias, repo_name, test_name, test_repo_container):
546 def __init__(self, alias, repo_name, test_name, test_repo_container):
519 self.alias = alias
547 self.alias = alias
520 self.repo_name = repo_name
548 self.repo_name = repo_name
521 self._cleanup_repos = []
549 self._cleanup_repos = []
522 self._test_name = test_name
550 self._test_name = test_name
523 self._test_repo_container = test_repo_container
551 self._test_repo_container = test_repo_container
524 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
552 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
525 # Fixture will survive in the end.
553 # Fixture will survive in the end.
526 self._fixture = Fixture()
554 self._fixture = Fixture()
527
555
528 def __getitem__(self, key):
556 def __getitem__(self, key):
529 return self._test_repo_container(key, self.alias)
557 return self._test_repo_container(key, self.alias)
530
558
531 def create_test_repo(self, key, config=None):
559 def create_test_repo(self, key, config=None):
532 return self._test_repo_container(key, self.alias, config)
560 return self._test_repo_container(key, self.alias, config)
533
561
534 @property
562 @property
535 def repo(self):
563 def repo(self):
536 """
564 """
537 Returns the "current" repository. This is the vcs_test repo or the
565 Returns the "current" repository. This is the vcs_test repo or the
538 last repo which has been created with `create_repo`.
566 last repo which has been created with `create_repo`.
539 """
567 """
540 from rhodecode.model.db import Repository
568 from rhodecode.model.db import Repository
541 return Repository.get_by_repo_name(self.repo_name)
569 return Repository.get_by_repo_name(self.repo_name)
542
570
543 @property
571 @property
544 def default_branch_name(self):
572 def default_branch_name(self):
545 VcsRepository = get_backend(self.alias)
573 VcsRepository = get_backend(self.alias)
546 return VcsRepository.DEFAULT_BRANCH_NAME
574 return VcsRepository.DEFAULT_BRANCH_NAME
547
575
548 @property
576 @property
549 def default_head_id(self):
577 def default_head_id(self):
550 """
578 """
551 Returns the default head id of the underlying backend.
579 Returns the default head id of the underlying backend.
552
580
553 This will be the default branch name if the backend has a default
581 This will be the default branch name if the backend has a default
554 branch. Otherwise it will point to a valid head which can serve
582 branch. Otherwise it will point to a valid head which can serve
555 as the base for creating a new commit on top of it.
583 as the base for creating a new commit on top of it.
556 """
584 """
557 vcsrepo = self.repo.scm_instance()
585 vcsrepo = self.repo.scm_instance()
558 head_id = (
586 head_id = (
559 vcsrepo.DEFAULT_BRANCH_NAME or
587 vcsrepo.DEFAULT_BRANCH_NAME or
560 vcsrepo.commit_ids[-1])
588 vcsrepo.commit_ids[-1])
561 return head_id
589 return head_id
562
590
563 @property
591 @property
564 def commit_ids(self):
592 def commit_ids(self):
565 """
593 """
566 Returns the list of commits for the last created repository
594 Returns the list of commits for the last created repository
567 """
595 """
568 return self._commit_ids
596 return self._commit_ids
569
597
570 def create_master_repo(self, commits):
598 def create_master_repo(self, commits):
571 """
599 """
572 Create a repository and remember it as a template.
600 Create a repository and remember it as a template.
573
601
574 This makes it easy to create derived repositories to construct
602 This makes it easy to create derived repositories to construct
575 more complex scenarios for diff, compare and pull requests.
603 more complex scenarios for diff, compare and pull requests.
576
604
577 Returns a commit map which maps from commit message to raw_id.
605 Returns a commit map which maps from commit message to raw_id.
578 """
606 """
579 self._master_repo = self.create_repo(commits=commits)
607 self._master_repo = self.create_repo(commits=commits)
580 return self._commit_ids
608 return self._commit_ids
581
609
582 def create_repo(
610 def create_repo(
583 self, commits=None, number_of_commits=0, heads=None,
611 self, commits=None, number_of_commits=0, heads=None,
584 name_suffix=u'', bare=False, **kwargs):
612 name_suffix=u'', bare=False, **kwargs):
585 """
613 """
586 Create a repository and record it for later cleanup.
614 Create a repository and record it for later cleanup.
587
615
588 :param commits: Optional. A sequence of dict instances.
616 :param commits: Optional. A sequence of dict instances.
589 Will add a commit per entry to the new repository.
617 Will add a commit per entry to the new repository.
590 :param number_of_commits: Optional. If set to a number, this number of
618 :param number_of_commits: Optional. If set to a number, this number of
591 commits will be added to the new repository.
619 commits will be added to the new repository.
592 :param heads: Optional. Can be set to a sequence of commit
620 :param heads: Optional. Can be set to a sequence of commit
593 names which shall be pulled in from the master repository.
621 names which shall be pulled in from the master repository.
594 :param name_suffix: adds special suffix to generated repo name
622 :param name_suffix: adds special suffix to generated repo name
595 :param bare: set a repo as bare (no checkout)
623 :param bare: set a repo as bare (no checkout)
596 """
624 """
597 self.repo_name = self._next_repo_name() + name_suffix
625 self.repo_name = self._next_repo_name() + name_suffix
598 repo = self._fixture.create_repo(
626 repo = self._fixture.create_repo(
599 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
627 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
600 self._cleanup_repos.append(repo.repo_name)
628 self._cleanup_repos.append(repo.repo_name)
601
629
602 commits = commits or [
630 commits = commits or [
603 {'message': 'Commit %s of %s' % (x, self.repo_name)}
631 {'message': 'Commit %s of %s' % (x, self.repo_name)}
604 for x in range(number_of_commits)]
632 for x in range(number_of_commits)]
605 self._add_commits_to_repo(repo.scm_instance(), commits)
633 self._add_commits_to_repo(repo.scm_instance(), commits)
606 if heads:
634 if heads:
607 self.pull_heads(repo, heads)
635 self.pull_heads(repo, heads)
608
636
609 return repo
637 return repo
610
638
611 def pull_heads(self, repo, heads):
639 def pull_heads(self, repo, heads):
612 """
640 """
613 Make sure that repo contains all commits mentioned in `heads`
641 Make sure that repo contains all commits mentioned in `heads`
614 """
642 """
615 vcsmaster = self._master_repo.scm_instance()
643 vcsmaster = self._master_repo.scm_instance()
616 vcsrepo = repo.scm_instance()
644 vcsrepo = repo.scm_instance()
617 vcsrepo.config.clear_section('hooks')
645 vcsrepo.config.clear_section('hooks')
618 commit_ids = [self._commit_ids[h] for h in heads]
646 commit_ids = [self._commit_ids[h] for h in heads]
619 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
647 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
620
648
621 def create_fork(self):
649 def create_fork(self):
622 repo_to_fork = self.repo_name
650 repo_to_fork = self.repo_name
623 self.repo_name = self._next_repo_name()
651 self.repo_name = self._next_repo_name()
624 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
652 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
625 self._cleanup_repos.append(self.repo_name)
653 self._cleanup_repos.append(self.repo_name)
626 return repo
654 return repo
627
655
628 def new_repo_name(self, suffix=u''):
656 def new_repo_name(self, suffix=u''):
629 self.repo_name = self._next_repo_name() + suffix
657 self.repo_name = self._next_repo_name() + suffix
630 self._cleanup_repos.append(self.repo_name)
658 self._cleanup_repos.append(self.repo_name)
631 return self.repo_name
659 return self.repo_name
632
660
633 def _next_repo_name(self):
661 def _next_repo_name(self):
634 return u"%s_%s" % (
662 return u"%s_%s" % (
635 self.invalid_repo_name.sub(u'_', self._test_name),
663 self.invalid_repo_name.sub(u'_', self._test_name),
636 len(self._cleanup_repos))
664 len(self._cleanup_repos))
637
665
638 def ensure_file(self, filename, content='Test content\n'):
666 def ensure_file(self, filename, content='Test content\n'):
639 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
667 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
640 commits = [
668 commits = [
641 {'added': [
669 {'added': [
642 FileNode(filename, content=content),
670 FileNode(filename, content=content),
643 ]},
671 ]},
644 ]
672 ]
645 self._add_commits_to_repo(self.repo.scm_instance(), commits)
673 self._add_commits_to_repo(self.repo.scm_instance(), commits)
646
674
647 def enable_downloads(self):
675 def enable_downloads(self):
648 repo = self.repo
676 repo = self.repo
649 repo.enable_downloads = True
677 repo.enable_downloads = True
650 Session().add(repo)
678 Session().add(repo)
651 Session().commit()
679 Session().commit()
652
680
653 def cleanup(self):
681 def cleanup(self):
654 for repo_name in reversed(self._cleanup_repos):
682 for repo_name in reversed(self._cleanup_repos):
655 self._fixture.destroy_repo(repo_name)
683 self._fixture.destroy_repo(repo_name)
656
684
657 def _add_commits_to_repo(self, repo, commits):
685 def _add_commits_to_repo(self, repo, commits):
658 commit_ids = _add_commits_to_repo(repo, commits)
686 commit_ids = _add_commits_to_repo(repo, commits)
659 if not commit_ids:
687 if not commit_ids:
660 return
688 return
661 self._commit_ids = commit_ids
689 self._commit_ids = commit_ids
662
690
663 # Creating refs for Git to allow fetching them from remote repository
691 # Creating refs for Git to allow fetching them from remote repository
664 if self.alias == 'git':
692 if self.alias == 'git':
665 refs = {}
693 refs = {}
666 for message in self._commit_ids:
694 for message in self._commit_ids:
667 # TODO: mikhail: do more special chars replacements
695 # TODO: mikhail: do more special chars replacements
668 ref_name = 'refs/test-refs/{}'.format(
696 ref_name = 'refs/test-refs/{}'.format(
669 message.replace(' ', ''))
697 message.replace(' ', ''))
670 refs[ref_name] = self._commit_ids[message]
698 refs[ref_name] = self._commit_ids[message]
671 self._create_refs(repo, refs)
699 self._create_refs(repo, refs)
672
700
673 def _create_refs(self, repo, refs):
701 def _create_refs(self, repo, refs):
674 for ref_name in refs:
702 for ref_name in refs:
675 repo.set_refs(ref_name, refs[ref_name])
703 repo.set_refs(ref_name, refs[ref_name])
676
704
677
705
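# Usage sketch for the `Backend` helper above (the test name is hypothetical;
# the parametrized `backend` fixture hands tests an instance of this class):
#
#     def test_two_commits_are_created(backend):
#         repo = backend.create_repo(number_of_commits=2)
#         # `backend.commit_ids` maps commit messages to raw commit ids
#         assert len(backend.commit_ids) == 2
#         assert repo.scm_instance().commit_ids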
678 @pytest.fixture
706 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
679 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
680 """
681 Parametrized fixture which represents a single vcs backend implementation.
682
683 See the fixture `backend` for more details. This one implements the same
684 concept, but on vcs level. So it does not provide model instances etc.
685
686 Parameters are generated dynamically, see :func:`pytest_generate_tests`
687 for how this works.
688 """
689 if backend_alias not in request.config.getoption('--backends'):
707 if backend_alias not in request.config.getoption('--backends'):
690 pytest.skip("Backend %s not selected." % (backend_alias, ))
708 pytest.skip("Backend %s not selected." % (backend_alias, ))
691
709
692 utils.check_xfail_backends(request.node, backend_alias)
710 utils.check_xfail_backends(request.node, backend_alias)
693 utils.check_skip_backends(request.node, backend_alias)
711 utils.check_skip_backends(request.node, backend_alias)
694
712
695 repo_name = 'vcs_test_%s' % (backend_alias, )
713 repo_name = 'vcs_test_%s' % (backend_alias, )
696 repo_path = os.path.join(tests_tmp_path, repo_name)
714 repo_path = os.path.join(tests_tmp_path, repo_name)
697 backend = VcsBackend(
715 backend = VcsBackend(
698 alias=backend_alias,
716 alias=backend_alias,
699 repo_path=repo_path,
717 repo_path=repo_path,
700 test_name=request.node.name,
718 test_name=request.node.name,
701 test_repo_container=test_repo)
719 test_repo_container=test_repo)
702 request.addfinalizer(backend.cleanup)
720 request.addfinalizer(backend.cleanup)
703 return backend
721 return backend
704
722
705
723
706 @pytest.fixture
724 @pytest.fixture
725 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
726 """
727 Parametrized fixture which represents a single vcs backend implementation.
728
729 See the fixture `backend` for more details. This one implements the same
730 concept, but on vcs level. So it does not provide model instances etc.
731
732 Parameters are generated dynamically, see :func:`pytest_generate_tests`
733 for how this works.
734 """
735 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
736
737
738 @pytest.fixture
707 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
739 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
708 return vcsbackend(request, 'git', tests_tmp_path, baseapp, test_repo)
740 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
709
741
710
742
711 @pytest.fixture
743 @pytest.fixture
712 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
744 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
713 return vcsbackend(request, 'hg', tests_tmp_path, baseapp, test_repo)
745 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
714
746
715
747
716 @pytest.fixture
748 @pytest.fixture
717 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
749 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
718 return vcsbackend(request, 'svn', tests_tmp_path, baseapp, test_repo)
750 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
719
720
721 @pytest.fixture
722 def vcsbackend_random(vcsbackend_git):
723 """
724 Use this to express that your tests need "a vcsbackend".
725
726 The fixture `vcsbackend` would run the test multiple times for each
727 available vcs backend which is a pure waste of time if the test is
728 independent of the vcs backend type.
729 """
730 # TODO: johbo: Change this to pick a random backend
731 return vcsbackend_git
732
751
733
752
734 @pytest.fixture
753 @pytest.fixture
735 def vcsbackend_stub(vcsbackend_git):
754 def vcsbackend_stub(vcsbackend_git):
736 """
755 """
737 Use this to express that your test just needs a stub of a vcsbackend.
756 Use this to express that your test just needs a stub of a vcsbackend.
738
757
739 Plan is to eventually implement an in-memory stub to speed tests up.
758 Plan is to eventually implement an in-memory stub to speed tests up.
740 """
759 """
741 return vcsbackend_git
760 return vcsbackend_git
742
761
743
762
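# Usage sketch (hypothetical test): unlike `backend`, the vcs-level fixtures
# hand out plain vcs repository objects without any database model around
# them, see `VcsBackend` below:
#
#     def test_vcs_repo_can_be_populated(vcsbackend):
#         repo = vcsbackend.create_repo(number_of_commits=2)
#         # `add_file` commits directly to the vcs repository
#         vcsbackend.add_file(repo, 'README', content='new content\n')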
744 class VcsBackend(object):
763 class VcsBackend(object):
745 """
764 """
746 Represents the test configuration for one supported vcs backend.
765 Represents the test configuration for one supported vcs backend.
747 """
766 """
748
767
749 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
768 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
750
769
751 def __init__(self, alias, repo_path, test_name, test_repo_container):
770 def __init__(self, alias, repo_path, test_name, test_repo_container):
752 self.alias = alias
771 self.alias = alias
753 self._repo_path = repo_path
772 self._repo_path = repo_path
754 self._cleanup_repos = []
773 self._cleanup_repos = []
755 self._test_name = test_name
774 self._test_name = test_name
756 self._test_repo_container = test_repo_container
775 self._test_repo_container = test_repo_container
757
776
758 def __getitem__(self, key):
777 def __getitem__(self, key):
759 return self._test_repo_container(key, self.alias).scm_instance()
778 return self._test_repo_container(key, self.alias).scm_instance()
760
779
761 @property
780 @property
762 def repo(self):
781 def repo(self):
763 """
782 """
764 Returns the "current" repository. This is the vcs_test repo or the last
783 Returns the "current" repository. This is the vcs_test repo or the last
765 repo which has been created.
784 repo which has been created.
766 """
785 """
767 Repository = get_backend(self.alias)
786 Repository = get_backend(self.alias)
768 return Repository(self._repo_path)
787 return Repository(self._repo_path)
769
788
770 @property
789 @property
771 def backend(self):
790 def backend(self):
772 """
791 """
773 Returns the backend implementation class.
792 Returns the backend implementation class.
774 """
793 """
775 return get_backend(self.alias)
794 return get_backend(self.alias)
776
795
777 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
796 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
778 bare=False):
797 bare=False):
779 repo_name = self._next_repo_name()
798 repo_name = self._next_repo_name()
780 self._repo_path = get_new_dir(repo_name)
799 self._repo_path = get_new_dir(repo_name)
781 repo_class = get_backend(self.alias)
800 repo_class = get_backend(self.alias)
782 src_url = None
801 src_url = None
783 if _clone_repo:
802 if _clone_repo:
784 src_url = _clone_repo.path
803 src_url = _clone_repo.path
785 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
804 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
786 self._cleanup_repos.append(repo)
805 self._cleanup_repos.append(repo)
787
806
788 commits = commits or [
807 commits = commits or [
789 {'message': 'Commit %s of %s' % (x, repo_name)}
808 {'message': 'Commit %s of %s' % (x, repo_name)}
790 for x in xrange(number_of_commits)]
809 for x in xrange(number_of_commits)]
791 _add_commits_to_repo(repo, commits)
810 _add_commits_to_repo(repo, commits)
792 return repo
811 return repo
793
812
794 def clone_repo(self, repo):
813 def clone_repo(self, repo):
795 return self.create_repo(_clone_repo=repo)
814 return self.create_repo(_clone_repo=repo)
796
815
797 def cleanup(self):
816 def cleanup(self):
798 for repo in self._cleanup_repos:
817 for repo in self._cleanup_repos:
799 shutil.rmtree(repo.path)
818 shutil.rmtree(repo.path)
800
819
801 def new_repo_path(self):
820 def new_repo_path(self):
802 repo_name = self._next_repo_name()
821 repo_name = self._next_repo_name()
803 self._repo_path = get_new_dir(repo_name)
822 self._repo_path = get_new_dir(repo_name)
804 return self._repo_path
823 return self._repo_path
805
824
806 def _next_repo_name(self):
825 def _next_repo_name(self):
807 return "%s_%s" % (
826 return "%s_%s" % (
808 self.invalid_repo_name.sub('_', self._test_name),
827 self.invalid_repo_name.sub('_', self._test_name),
809 len(self._cleanup_repos))
828 len(self._cleanup_repos))
810
829
811 def add_file(self, repo, filename, content='Test content\n'):
830 def add_file(self, repo, filename, content='Test content\n'):
812 imc = repo.in_memory_commit
831 imc = repo.in_memory_commit
813 imc.add(FileNode(filename, content=content))
832 imc.add(FileNode(filename, content=content))
814 imc.commit(
833 imc.commit(
815 message=u'Automatic commit from vcsbackend fixture',
834 message=u'Automatic commit from vcsbackend fixture',
816 author=u'Automatic')
835 author=u'Automatic')
817
836
818 def ensure_file(self, filename, content='Test content\n'):
837 def ensure_file(self, filename, content='Test content\n'):
819 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
838 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
820 self.add_file(self.repo, filename, content)
839 self.add_file(self.repo, filename, content)
821
840
822
841
823 def _add_commits_to_repo(vcs_repo, commits):
842 def _add_commits_to_repo(vcs_repo, commits):
824 commit_ids = {}
843 commit_ids = {}
825 if not commits:
844 if not commits:
826 return commit_ids
845 return commit_ids
827
846
828 imc = vcs_repo.in_memory_commit
847 imc = vcs_repo.in_memory_commit
829 commit = None
848 commit = None
830
849
831 for idx, commit in enumerate(commits):
850 for idx, commit in enumerate(commits):
832 message = unicode(commit.get('message', 'Commit %s' % idx))
851 message = unicode(commit.get('message', 'Commit %s' % idx))
833
852
834 for node in commit.get('added', []):
853 for node in commit.get('added', []):
835 imc.add(FileNode(node.path, content=node.content))
854 imc.add(FileNode(node.path, content=node.content))
836 for node in commit.get('changed', []):
855 for node in commit.get('changed', []):
837 imc.change(FileNode(node.path, content=node.content))
856 imc.change(FileNode(node.path, content=node.content))
838 for node in commit.get('removed', []):
857 for node in commit.get('removed', []):
839 imc.remove(FileNode(node.path))
858 imc.remove(FileNode(node.path))
840
859
841 parents = [
860 parents = [
842 vcs_repo.get_commit(commit_id=commit_ids[p])
861 vcs_repo.get_commit(commit_id=commit_ids[p])
843 for p in commit.get('parents', [])]
862 for p in commit.get('parents', [])]
844
863
845 operations = ('added', 'changed', 'removed')
864 operations = ('added', 'changed', 'removed')
846 if not any((commit.get(o) for o in operations)):
865 if not any((commit.get(o) for o in operations)):
847 imc.add(FileNode('file_%s' % idx, content=message))
866 imc.add(FileNode('file_%s' % idx, content=message))
848
867
849 commit = imc.commit(
868 commit = imc.commit(
850 message=message,
869 message=message,
851 author=unicode(commit.get('author', 'Automatic')),
870 author=unicode(commit.get('author', 'Automatic')),
852 date=commit.get('date'),
871 date=commit.get('date'),
853 branch=commit.get('branch'),
872 branch=commit.get('branch'),
854 parents=parents)
873 parents=parents)
855
874
856 commit_ids[commit.message] = commit.raw_id
875 commit_ids[commit.message] = commit.raw_id
857
876
858 return commit_ids
877 return commit_ids
859
878
860
879
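# The `commits` argument consumed above (and by `Backend.create_repo` /
# `VcsBackend.create_repo`) is a sequence of plain dicts. A sketch of the
# recognised keys, derived from the loop above; file names and contents are
# only examples:
#
#     commits = [
#         {'message': 'c1',
#          'added': [FileNode('setup.py', content='print 1\n')]},
#         {'message': 'c2',
#          'parents': ['c1'],  # refers to the message of an earlier commit
#          'changed': [FileNode('setup.py', content='print 2\n')],
#          'author': 'Automatic',
#          'branch': None},
#     ]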
861 @pytest.fixture
880 @pytest.fixture
862 def reposerver(request):
881 def reposerver(request):
863 """
882 """
864 Allows serving a backend repository
883 Allows serving a backend repository
865 """
884 """
866
885
867 repo_server = RepoServer()
886 repo_server = RepoServer()
868 request.addfinalizer(repo_server.cleanup)
887 request.addfinalizer(repo_server.cleanup)
869 return repo_server
888 return repo_server
870
889
871
890
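# Usage sketch (hypothetical test): only Subversion repositories can be
# served so far, see `RepoServer.serve` below:
#
#     def test_serve_svn_repo(reposerver, vcsbackend_svn):
#         source = vcsbackend_svn.create_repo(number_of_commits=1)
#         reposerver.serve(source)
#         assert reposerver.url == 'svn://localhost'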
872 class RepoServer(object):
891 class RepoServer(object):
873 """
892 """
874 Utility to serve a local repository for the duration of a test case.
893 Utility to serve a local repository for the duration of a test case.
875
894
876 Supports only Subversion so far.
895 Supports only Subversion so far.
877 """
896 """
878
897
879 url = None
898 url = None
880
899
881 def __init__(self):
900 def __init__(self):
882 self._cleanup_servers = []
901 self._cleanup_servers = []
883
902
884 def serve(self, vcsrepo):
903 def serve(self, vcsrepo):
885 if vcsrepo.alias != 'svn':
904 if vcsrepo.alias != 'svn':
886 raise TypeError("Backend %s not supported" % vcsrepo.alias)
905 raise TypeError("Backend %s not supported" % vcsrepo.alias)
887
906
888 proc = subprocess32.Popen(
907 proc = subprocess32.Popen(
889 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
908 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
890 '--root', vcsrepo.path])
909 '--root', vcsrepo.path])
891 self._cleanup_servers.append(proc)
910 self._cleanup_servers.append(proc)
892 self.url = 'svn://localhost'
911 self.url = 'svn://localhost'
893
912
894 def cleanup(self):
913 def cleanup(self):
895 for proc in self._cleanup_servers:
914 for proc in self._cleanup_servers:
896 proc.terminate()
915 proc.terminate()
897
916
898
917
899 @pytest.fixture
918 @pytest.fixture
900 def pr_util(backend, request, config_stub):
919 def pr_util(backend, request, config_stub):
901 """
920 """
902 Utility for tests of models and for functional tests around pull requests.
921 Utility for tests of models and for functional tests around pull requests.
903
922
904 It gives an instance of :class:`PRTestUtility` which provides various
923 It gives an instance of :class:`PRTestUtility` which provides various
905 utility methods around one pull request.
924 utility methods around one pull request.
906
925
907 This fixture uses `backend` and inherits its parameterization.
926 This fixture uses `backend` and inherits its parameterization.
908 """
927 """
909
928
910 util = PRTestUtility(backend)
929 util = PRTestUtility(backend)
911 request.addfinalizer(util.cleanup)
930 request.addfinalizer(util.cleanup)
912
931
913 return util
932 return util
914
933
915
934
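# Usage sketch (hypothetical test): `pr_util` wraps a single pull request and
# the repositories behind it, see `PRTestUtility` below:
#
#     def test_pull_request_gains_a_commit(pr_util):
#         pull_request = pr_util.create_pull_request(mergeable=True)
#         new_commit_id = pr_util.add_one_commit()
#         assert new_commit_id in pull_request.revisions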
916 class PRTestUtility(object):
935 class PRTestUtility(object):
917
936
918 pull_request = None
937 pull_request = None
919 pull_request_id = None
938 pull_request_id = None
920 mergeable_patcher = None
939 mergeable_patcher = None
921 mergeable_mock = None
940 mergeable_mock = None
922 notification_patcher = None
941 notification_patcher = None
923
942
924 def __init__(self, backend):
943 def __init__(self, backend):
925 self.backend = backend
944 self.backend = backend
926
945
927 def create_pull_request(
946 def create_pull_request(
928 self, commits=None, target_head=None, source_head=None,
947 self, commits=None, target_head=None, source_head=None,
929 revisions=None, approved=False, author=None, mergeable=False,
948 revisions=None, approved=False, author=None, mergeable=False,
930 enable_notifications=True, name_suffix=u'', reviewers=None,
949 enable_notifications=True, name_suffix=u'', reviewers=None,
931 title=u"Test", description=u"Description"):
950 title=u"Test", description=u"Description"):
932 self.set_mergeable(mergeable)
951 self.set_mergeable(mergeable)
933 if not enable_notifications:
952 if not enable_notifications:
934 # mock notification side effect
953 # mock notification side effect
935 self.notification_patcher = mock.patch(
954 self.notification_patcher = mock.patch(
936 'rhodecode.model.notification.NotificationModel.create')
955 'rhodecode.model.notification.NotificationModel.create')
937 self.notification_patcher.start()
956 self.notification_patcher.start()
938
957
939 if not self.pull_request:
958 if not self.pull_request:
940 if not commits:
959 if not commits:
941 commits = [
960 commits = [
942 {'message': 'c1'},
961 {'message': 'c1'},
943 {'message': 'c2'},
962 {'message': 'c2'},
944 {'message': 'c3'},
963 {'message': 'c3'},
945 ]
964 ]
946 target_head = 'c1'
965 target_head = 'c1'
947 source_head = 'c2'
966 source_head = 'c2'
948 revisions = ['c2']
967 revisions = ['c2']
949
968
950 self.commit_ids = self.backend.create_master_repo(commits)
969 self.commit_ids = self.backend.create_master_repo(commits)
951 self.target_repository = self.backend.create_repo(
970 self.target_repository = self.backend.create_repo(
952 heads=[target_head], name_suffix=name_suffix)
971 heads=[target_head], name_suffix=name_suffix)
953 self.source_repository = self.backend.create_repo(
972 self.source_repository = self.backend.create_repo(
954 heads=[source_head], name_suffix=name_suffix)
973 heads=[source_head], name_suffix=name_suffix)
955 self.author = author or UserModel().get_by_username(
974 self.author = author or UserModel().get_by_username(
956 TEST_USER_ADMIN_LOGIN)
975 TEST_USER_ADMIN_LOGIN)
957
976
958 model = PullRequestModel()
977 model = PullRequestModel()
959 self.create_parameters = {
978 self.create_parameters = {
960 'created_by': self.author,
979 'created_by': self.author,
961 'source_repo': self.source_repository.repo_name,
980 'source_repo': self.source_repository.repo_name,
962 'source_ref': self._default_branch_reference(source_head),
981 'source_ref': self._default_branch_reference(source_head),
963 'target_repo': self.target_repository.repo_name,
982 'target_repo': self.target_repository.repo_name,
964 'target_ref': self._default_branch_reference(target_head),
983 'target_ref': self._default_branch_reference(target_head),
965 'revisions': [self.commit_ids[r] for r in revisions],
984 'revisions': [self.commit_ids[r] for r in revisions],
966 'reviewers': reviewers or self._get_reviewers(),
985 'reviewers': reviewers or self._get_reviewers(),
967 'title': title,
986 'title': title,
968 'description': description,
987 'description': description,
969 }
988 }
970 self.pull_request = model.create(**self.create_parameters)
989 self.pull_request = model.create(**self.create_parameters)
971 assert model.get_versions(self.pull_request) == []
990 assert model.get_versions(self.pull_request) == []
972
991
973 self.pull_request_id = self.pull_request.pull_request_id
992 self.pull_request_id = self.pull_request.pull_request_id
974
993
975 if approved:
994 if approved:
976 self.approve()
995 self.approve()
977
996
978 Session().add(self.pull_request)
997 Session().add(self.pull_request)
979 Session().commit()
998 Session().commit()
980
999
981 return self.pull_request
1000 return self.pull_request
982
1001
983 def approve(self):
1002 def approve(self):
984 self.create_status_votes(
1003 self.create_status_votes(
985 ChangesetStatus.STATUS_APPROVED,
1004 ChangesetStatus.STATUS_APPROVED,
986 *self.pull_request.reviewers)
1005 *self.pull_request.reviewers)
987
1006
988 def close(self):
1007 def close(self):
989 PullRequestModel().close_pull_request(self.pull_request, self.author)
1008 PullRequestModel().close_pull_request(self.pull_request, self.author)
990
1009
991 def _default_branch_reference(self, commit_message):
1010 def _default_branch_reference(self, commit_message):
992 reference = '%s:%s:%s' % (
1011 reference = '%s:%s:%s' % (
993 'branch',
1012 'branch',
994 self.backend.default_branch_name,
1013 self.backend.default_branch_name,
995 self.commit_ids[commit_message])
1014 self.commit_ids[commit_message])
996 return reference
1015 return reference
997
1016
998 def _get_reviewers(self):
1017 def _get_reviewers(self):
999 return [
1018 return [
1000 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1019 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1001 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1020 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1002 ]
1021 ]
1003
1022
1004 def update_source_repository(self, head=None):
1023 def update_source_repository(self, head=None):
1005 heads = [head or 'c3']
1024 heads = [head or 'c3']
1006 self.backend.pull_heads(self.source_repository, heads=heads)
1025 self.backend.pull_heads(self.source_repository, heads=heads)
1007
1026
1008 def add_one_commit(self, head=None):
1027 def add_one_commit(self, head=None):
1009 self.update_source_repository(head=head)
1028 self.update_source_repository(head=head)
1010 old_commit_ids = set(self.pull_request.revisions)
1029 old_commit_ids = set(self.pull_request.revisions)
1011 PullRequestModel().update_commits(self.pull_request)
1030 PullRequestModel().update_commits(self.pull_request)
1012 commit_ids = set(self.pull_request.revisions)
1031 commit_ids = set(self.pull_request.revisions)
1013 new_commit_ids = commit_ids - old_commit_ids
1032 new_commit_ids = commit_ids - old_commit_ids
1014 assert len(new_commit_ids) == 1
1033 assert len(new_commit_ids) == 1
1015 return new_commit_ids.pop()
1034 return new_commit_ids.pop()
1016
1035
1017 def remove_one_commit(self):
1036 def remove_one_commit(self):
1018 assert len(self.pull_request.revisions) == 2
1037 assert len(self.pull_request.revisions) == 2
1019 source_vcs = self.source_repository.scm_instance()
1038 source_vcs = self.source_repository.scm_instance()
1020 removed_commit_id = source_vcs.commit_ids[-1]
1039 removed_commit_id = source_vcs.commit_ids[-1]
1021
1040
1022 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1041 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1023 # remove the if once that's sorted out.
1042 # remove the if once that's sorted out.
1024 if self.backend.alias == "git":
1043 if self.backend.alias == "git":
1025 kwargs = {'branch_name': self.backend.default_branch_name}
1044 kwargs = {'branch_name': self.backend.default_branch_name}
1026 else:
1045 else:
1027 kwargs = {}
1046 kwargs = {}
1028 source_vcs.strip(removed_commit_id, **kwargs)
1047 source_vcs.strip(removed_commit_id, **kwargs)
1029
1048
1030 PullRequestModel().update_commits(self.pull_request)
1049 PullRequestModel().update_commits(self.pull_request)
1031 assert len(self.pull_request.revisions) == 1
1050 assert len(self.pull_request.revisions) == 1
1032 return removed_commit_id
1051 return removed_commit_id
1033
1052
1034 def create_comment(self, linked_to=None):
1053 def create_comment(self, linked_to=None):
1035 comment = CommentsModel().create(
1054 comment = CommentsModel().create(
1036 text=u"Test comment",
1055 text=u"Test comment",
1037 repo=self.target_repository.repo_name,
1056 repo=self.target_repository.repo_name,
1038 user=self.author,
1057 user=self.author,
1039 pull_request=self.pull_request)
1058 pull_request=self.pull_request)
1040 assert comment.pull_request_version_id is None
1059 assert comment.pull_request_version_id is None
1041
1060
1042 if linked_to:
1061 if linked_to:
1043 PullRequestModel()._link_comments_to_version(linked_to)
1062 PullRequestModel()._link_comments_to_version(linked_to)
1044
1063
1045 return comment
1064 return comment
1046
1065
1047 def create_inline_comment(
1066 def create_inline_comment(
1048 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1067 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1049 comment = CommentsModel().create(
1068 comment = CommentsModel().create(
1050 text=u"Test comment",
1069 text=u"Test comment",
1051 repo=self.target_repository.repo_name,
1070 repo=self.target_repository.repo_name,
1052 user=self.author,
1071 user=self.author,
1053 line_no=line_no,
1072 line_no=line_no,
1054 f_path=file_path,
1073 f_path=file_path,
1055 pull_request=self.pull_request)
1074 pull_request=self.pull_request)
1056 assert comment.pull_request_version_id is None
1075 assert comment.pull_request_version_id is None
1057
1076
1058 if linked_to:
1077 if linked_to:
1059 PullRequestModel()._link_comments_to_version(linked_to)
1078 PullRequestModel()._link_comments_to_version(linked_to)
1060
1079
1061 return comment
1080 return comment
1062
1081
1063 def create_version_of_pull_request(self):
1082 def create_version_of_pull_request(self):
1064 pull_request = self.create_pull_request()
1083 pull_request = self.create_pull_request()
1065 version = PullRequestModel()._create_version_from_snapshot(
1084 version = PullRequestModel()._create_version_from_snapshot(
1066 pull_request)
1085 pull_request)
1067 return version
1086 return version
1068
1087
1069 def create_status_votes(self, status, *reviewers):
1088 def create_status_votes(self, status, *reviewers):
1070 for reviewer in reviewers:
1089 for reviewer in reviewers:
1071 ChangesetStatusModel().set_status(
1090 ChangesetStatusModel().set_status(
1072 repo=self.pull_request.target_repo,
1091 repo=self.pull_request.target_repo,
1073 status=status,
1092 status=status,
1074 user=reviewer.user_id,
1093 user=reviewer.user_id,
1075 pull_request=self.pull_request)
1094 pull_request=self.pull_request)
1076
1095
1077 def set_mergeable(self, value):
1096 def set_mergeable(self, value):
1078 if not self.mergeable_patcher:
1097 if not self.mergeable_patcher:
1079 self.mergeable_patcher = mock.patch.object(
1098 self.mergeable_patcher = mock.patch.object(
1080 VcsSettingsModel, 'get_general_settings')
1099 VcsSettingsModel, 'get_general_settings')
1081 self.mergeable_mock = self.mergeable_patcher.start()
1100 self.mergeable_mock = self.mergeable_patcher.start()
1082 self.mergeable_mock.return_value = {
1101 self.mergeable_mock.return_value = {
1083 'rhodecode_pr_merge_enabled': value}
1102 'rhodecode_pr_merge_enabled': value}
1084
1103
1085 def cleanup(self):
1104 def cleanup(self):
1086 # In case the source repository is already cleaned up, the pull
1105 # In case the source repository is already cleaned up, the pull
1087 # request will already be deleted.
1106 # request will already be deleted.
1088 pull_request = PullRequest().get(self.pull_request_id)
1107 pull_request = PullRequest().get(self.pull_request_id)
1089 if pull_request:
1108 if pull_request:
1090 PullRequestModel().delete(pull_request, pull_request.author)
1109 PullRequestModel().delete(pull_request, pull_request.author)
1091 Session().commit()
1110 Session().commit()
1092
1111
1093 if self.notification_patcher:
1112 if self.notification_patcher:
1094 self.notification_patcher.stop()
1113 self.notification_patcher.stop()
1095
1114
1096 if self.mergeable_patcher:
1115 if self.mergeable_patcher:
1097 self.mergeable_patcher.stop()
1116 self.mergeable_patcher.stop()
1098
1117
1099
1118
1100 @pytest.fixture
1119 @pytest.fixture
1101 def user_admin(baseapp):
1120 def user_admin(baseapp):
1102 """
1121 """
1103 Provides the default admin test user as an instance of `db.User`.
1122 Provides the default admin test user as an instance of `db.User`.
1104 """
1123 """
1105 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1124 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1106 return user
1125 return user
1107
1126
1108
1127
1109 @pytest.fixture
1128 @pytest.fixture
1110 def user_regular(baseapp):
1129 def user_regular(baseapp):
1111 """
1130 """
1112 Provides the default regular test user as an instance of `db.User`.
1131 Provides the default regular test user as an instance of `db.User`.
1113 """
1132 """
1114 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1133 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1115 return user
1134 return user
1116
1135
1117
1136
1118 @pytest.fixture
1137 @pytest.fixture
1119 def user_util(request, db_connection):
1138 def user_util(request, db_connection):
1120 """
1139 """
1121 Provides a wired instance of `UserUtility` with integrated cleanup.
1140 Provides a wired instance of `UserUtility` with integrated cleanup.
1122 """
1141 """
1123 utility = UserUtility(test_name=request.node.name)
1142 utility = UserUtility(test_name=request.node.name)
1124 request.addfinalizer(utility.cleanup)
1143 request.addfinalizer(utility.cleanup)
1125 return utility
1144 return utility
1126
1145
1127
1146
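# Usage sketch (hypothetical test; `repository.write` is the standard write
# permission level): everything created through `user_util` is cleaned up
# automatically when the test ends:
#
#     def test_user_gets_write_access(user_util):
#         user = user_util.create_user()
#         repo = user_util.create_repo()
#         user_util.grant_user_permission_to_repo(
#             repo, user, 'repository.write')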
1128 # TODO: johbo: Split this up into utilities per domain or something similar
1147 # TODO: johbo: Split this up into utilities per domain or something similar
1129 class UserUtility(object):
1148 class UserUtility(object):
1130
1149
1131 def __init__(self, test_name="test"):
1150 def __init__(self, test_name="test"):
1132 self._test_name = self._sanitize_name(test_name)
1151 self._test_name = self._sanitize_name(test_name)
1133 self.fixture = Fixture()
1152 self.fixture = Fixture()
1134 self.repo_group_ids = []
1153 self.repo_group_ids = []
1135 self.repos_ids = []
1154 self.repos_ids = []
1136 self.user_ids = []
1155 self.user_ids = []
1137 self.user_group_ids = []
1156 self.user_group_ids = []
1138 self.user_repo_permission_ids = []
1157 self.user_repo_permission_ids = []
1139 self.user_group_repo_permission_ids = []
1158 self.user_group_repo_permission_ids = []
1140 self.user_repo_group_permission_ids = []
1159 self.user_repo_group_permission_ids = []
1141 self.user_group_repo_group_permission_ids = []
1160 self.user_group_repo_group_permission_ids = []
1142 self.user_user_group_permission_ids = []
1161 self.user_user_group_permission_ids = []
1143 self.user_group_user_group_permission_ids = []
1162 self.user_group_user_group_permission_ids = []
1144 self.user_permissions = []
1163 self.user_permissions = []
1145
1164
1146 def _sanitize_name(self, name):
1165 def _sanitize_name(self, name):
1147 for char in ['[', ']']:
1166 for char in ['[', ']']:
1148 name = name.replace(char, '_')
1167 name = name.replace(char, '_')
1149 return name
1168 return name
1150
1169
1151 def create_repo_group(
1170 def create_repo_group(
1152 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1171 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1153 group_name = "{prefix}_repogroup_{count}".format(
1172 group_name = "{prefix}_repogroup_{count}".format(
1154 prefix=self._test_name,
1173 prefix=self._test_name,
1155 count=len(self.repo_group_ids))
1174 count=len(self.repo_group_ids))
1156 repo_group = self.fixture.create_repo_group(
1175 repo_group = self.fixture.create_repo_group(
1157 group_name, cur_user=owner)
1176 group_name, cur_user=owner)
1158 if auto_cleanup:
1177 if auto_cleanup:
1159 self.repo_group_ids.append(repo_group.group_id)
1178 self.repo_group_ids.append(repo_group.group_id)
1160 return repo_group
1179 return repo_group
1161
1180
1162 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1181 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1163 auto_cleanup=True, repo_type='hg', bare=False):
1182 auto_cleanup=True, repo_type='hg', bare=False):
1164 repo_name = "{prefix}_repository_{count}".format(
1183 repo_name = "{prefix}_repository_{count}".format(
1165 prefix=self._test_name,
1184 prefix=self._test_name,
1166 count=len(self.repos_ids))
1185 count=len(self.repos_ids))
1167
1186
1168 repository = self.fixture.create_repo(
1187 repository = self.fixture.create_repo(
1169 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1188 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1170 if auto_cleanup:
1189 if auto_cleanup:
1171 self.repos_ids.append(repository.repo_id)
1190 self.repos_ids.append(repository.repo_id)
1172 return repository
1191 return repository
1173
1192
1174 def create_user(self, auto_cleanup=True, **kwargs):
1193 def create_user(self, auto_cleanup=True, **kwargs):
1175 user_name = "{prefix}_user_{count}".format(
1194 user_name = "{prefix}_user_{count}".format(
1176 prefix=self._test_name,
1195 prefix=self._test_name,
1177 count=len(self.user_ids))
1196 count=len(self.user_ids))
1178 user = self.fixture.create_user(user_name, **kwargs)
1197 user = self.fixture.create_user(user_name, **kwargs)
1179 if auto_cleanup:
1198 if auto_cleanup:
1180 self.user_ids.append(user.user_id)
1199 self.user_ids.append(user.user_id)
1181 return user
1200 return user
1182
1201
1183 def create_additional_user_email(self, user, email):
1202 def create_additional_user_email(self, user, email):
1184 uem = self.fixture.create_additional_user_email(user=user, email=email)
1203 uem = self.fixture.create_additional_user_email(user=user, email=email)
1185 return uem
1204 return uem
1186
1205
1187 def create_user_with_group(self):
1206 def create_user_with_group(self):
1188 user = self.create_user()
1207 user = self.create_user()
1189 user_group = self.create_user_group(members=[user])
1208 user_group = self.create_user_group(members=[user])
1190 return user, user_group
1209 return user, user_group
1191
1210
1192 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1211 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1193 auto_cleanup=True, **kwargs):
1212 auto_cleanup=True, **kwargs):
1194 group_name = "{prefix}_usergroup_{count}".format(
1213 group_name = "{prefix}_usergroup_{count}".format(
1195 prefix=self._test_name,
1214 prefix=self._test_name,
1196 count=len(self.user_group_ids))
1215 count=len(self.user_group_ids))
1197 user_group = self.fixture.create_user_group(
1216 user_group = self.fixture.create_user_group(
1198 group_name, cur_user=owner, **kwargs)
1217 group_name, cur_user=owner, **kwargs)
1199
1218
1200 if auto_cleanup:
1219 if auto_cleanup:
1201 self.user_group_ids.append(user_group.users_group_id)
1220 self.user_group_ids.append(user_group.users_group_id)
1202 if members:
1221 if members:
1203 for user in members:
1222 for user in members:
1204 UserGroupModel().add_user_to_group(user_group, user)
1223 UserGroupModel().add_user_to_group(user_group, user)
1205 return user_group
1224 return user_group
1206
1225
1207 def grant_user_permission(self, user_name, permission_name):
1226 def grant_user_permission(self, user_name, permission_name):
1208 self._inherit_default_user_permissions(user_name, False)
1227 self._inherit_default_user_permissions(user_name, False)
1209 self.user_permissions.append((user_name, permission_name))
1228 self.user_permissions.append((user_name, permission_name))
1210
1229
1211 def grant_user_permission_to_repo_group(
1230 def grant_user_permission_to_repo_group(
1212 self, repo_group, user, permission_name):
1231 self, repo_group, user, permission_name):
1213 permission = RepoGroupModel().grant_user_permission(
1232 permission = RepoGroupModel().grant_user_permission(
1214 repo_group, user, permission_name)
1233 repo_group, user, permission_name)
1215 self.user_repo_group_permission_ids.append(
1234 self.user_repo_group_permission_ids.append(
1216 (repo_group.group_id, user.user_id))
1235 (repo_group.group_id, user.user_id))
1217 return permission
1236 return permission
1218
1237
1219 def grant_user_group_permission_to_repo_group(
1238 def grant_user_group_permission_to_repo_group(
1220 self, repo_group, user_group, permission_name):
1239 self, repo_group, user_group, permission_name):
1221 permission = RepoGroupModel().grant_user_group_permission(
1240 permission = RepoGroupModel().grant_user_group_permission(
1222 repo_group, user_group, permission_name)
1241 repo_group, user_group, permission_name)
1223 self.user_group_repo_group_permission_ids.append(
1242 self.user_group_repo_group_permission_ids.append(
1224 (repo_group.group_id, user_group.users_group_id))
1243 (repo_group.group_id, user_group.users_group_id))
1225 return permission
1244 return permission
1226
1245
1227 def grant_user_permission_to_repo(
1246 def grant_user_permission_to_repo(
1228 self, repo, user, permission_name):
1247 self, repo, user, permission_name):
1229 permission = RepoModel().grant_user_permission(
1248 permission = RepoModel().grant_user_permission(
1230 repo, user, permission_name)
1249 repo, user, permission_name)
1231 self.user_repo_permission_ids.append(
1250 self.user_repo_permission_ids.append(
1232 (repo.repo_id, user.user_id))
1251 (repo.repo_id, user.user_id))
1233 return permission
1252 return permission
1234
1253
1235 def grant_user_group_permission_to_repo(
1254 def grant_user_group_permission_to_repo(
1236 self, repo, user_group, permission_name):
1255 self, repo, user_group, permission_name):
1237 permission = RepoModel().grant_user_group_permission(
1256 permission = RepoModel().grant_user_group_permission(
1238 repo, user_group, permission_name)
1257 repo, user_group, permission_name)
1239 self.user_group_repo_permission_ids.append(
1258 self.user_group_repo_permission_ids.append(
1240 (repo.repo_id, user_group.users_group_id))
1259 (repo.repo_id, user_group.users_group_id))
1241 return permission
1260 return permission
1242
1261
1243 def grant_user_permission_to_user_group(
1262 def grant_user_permission_to_user_group(
1244 self, target_user_group, user, permission_name):
1263 self, target_user_group, user, permission_name):
1245 permission = UserGroupModel().grant_user_permission(
1264 permission = UserGroupModel().grant_user_permission(
1246 target_user_group, user, permission_name)
1265 target_user_group, user, permission_name)
1247 self.user_user_group_permission_ids.append(
1266 self.user_user_group_permission_ids.append(
1248 (target_user_group.users_group_id, user.user_id))
1267 (target_user_group.users_group_id, user.user_id))
1249 return permission
1268 return permission
1250
1269
1251 def grant_user_group_permission_to_user_group(
1270 def grant_user_group_permission_to_user_group(
1252 self, target_user_group, user_group, permission_name):
1271 self, target_user_group, user_group, permission_name):
1253 permission = UserGroupModel().grant_user_group_permission(
1272 permission = UserGroupModel().grant_user_group_permission(
1254 target_user_group, user_group, permission_name)
1273 target_user_group, user_group, permission_name)
1255 self.user_group_user_group_permission_ids.append(
1274 self.user_group_user_group_permission_ids.append(
1256 (target_user_group.users_group_id, user_group.users_group_id))
1275 (target_user_group.users_group_id, user_group.users_group_id))
1257 return permission
1276 return permission
1258
1277
1259 def revoke_user_permission(self, user_name, permission_name):
1278 def revoke_user_permission(self, user_name, permission_name):
1260 self._inherit_default_user_permissions(user_name, True)
1279 self._inherit_default_user_permissions(user_name, True)
1261 UserModel().revoke_perm(user_name, permission_name)
1280 UserModel().revoke_perm(user_name, permission_name)
1262
1281
1263 def _inherit_default_user_permissions(self, user_name, value):
1282 def _inherit_default_user_permissions(self, user_name, value):
1264 user = UserModel().get_by_username(user_name)
1283 user = UserModel().get_by_username(user_name)
1265 user.inherit_default_permissions = value
1284 user.inherit_default_permissions = value
1266 Session().add(user)
1285 Session().add(user)
1267 Session().commit()
1286 Session().commit()
1268
1287
1269 def cleanup(self):
1288 def cleanup(self):
1270 self._cleanup_permissions()
1289 self._cleanup_permissions()
1271 self._cleanup_repos()
1290 self._cleanup_repos()
1272 self._cleanup_repo_groups()
1291 self._cleanup_repo_groups()
1273 self._cleanup_user_groups()
1292 self._cleanup_user_groups()
1274 self._cleanup_users()
1293 self._cleanup_users()
1275
1294
1276 def _cleanup_permissions(self):
1295 def _cleanup_permissions(self):
1277 if self.user_permissions:
1296 if self.user_permissions:
1278 for user_name, permission_name in self.user_permissions:
1297 for user_name, permission_name in self.user_permissions:
1279 self.revoke_user_permission(user_name, permission_name)
1298 self.revoke_user_permission(user_name, permission_name)
1280
1299
1281 for permission in self.user_repo_permission_ids:
1300 for permission in self.user_repo_permission_ids:
1282 RepoModel().revoke_user_permission(*permission)
1301 RepoModel().revoke_user_permission(*permission)
1283
1302
1284 for permission in self.user_group_repo_permission_ids:
1303 for permission in self.user_group_repo_permission_ids:
1285 RepoModel().revoke_user_group_permission(*permission)
1304 RepoModel().revoke_user_group_permission(*permission)
1286
1305
1287 for permission in self.user_repo_group_permission_ids:
1306 for permission in self.user_repo_group_permission_ids:
1288 RepoGroupModel().revoke_user_permission(*permission)
1307 RepoGroupModel().revoke_user_permission(*permission)
1289
1308
1290 for permission in self.user_group_repo_group_permission_ids:
1309 for permission in self.user_group_repo_group_permission_ids:
1291 RepoGroupModel().revoke_user_group_permission(*permission)
1310 RepoGroupModel().revoke_user_group_permission(*permission)
1292
1311
1293 for permission in self.user_user_group_permission_ids:
1312 for permission in self.user_user_group_permission_ids:
1294 UserGroupModel().revoke_user_permission(*permission)
1313 UserGroupModel().revoke_user_permission(*permission)
1295
1314
1296 for permission in self.user_group_user_group_permission_ids:
1315 for permission in self.user_group_user_group_permission_ids:
1297 UserGroupModel().revoke_user_group_permission(*permission)
1316 UserGroupModel().revoke_user_group_permission(*permission)
1298
1317
1299 def _cleanup_repo_groups(self):
1318 def _cleanup_repo_groups(self):
1300 def _repo_group_compare(first_group_id, second_group_id):
1319 def _repo_group_compare(first_group_id, second_group_id):
1301 """
1320 """
1302 Gives higher priority to the groups with the most complex paths
1321 Gives higher priority to the groups with the most complex paths
1303 """
1322 """
1304 first_group = RepoGroup.get(first_group_id)
1323 first_group = RepoGroup.get(first_group_id)
1305 second_group = RepoGroup.get(second_group_id)
1324 second_group = RepoGroup.get(second_group_id)
1306 first_group_parts = (
1325 first_group_parts = (
1307 len(first_group.group_name.split('/')) if first_group else 0)
1326 len(first_group.group_name.split('/')) if first_group else 0)
1308 second_group_parts = (
1327 second_group_parts = (
1309 len(second_group.group_name.split('/')) if second_group else 0)
1328 len(second_group.group_name.split('/')) if second_group else 0)
1310 return cmp(second_group_parts, first_group_parts)
1329 return cmp(second_group_parts, first_group_parts)
1311
1330
1312 sorted_repo_group_ids = sorted(
1331 sorted_repo_group_ids = sorted(
1313 self.repo_group_ids, cmp=_repo_group_compare)
1332 self.repo_group_ids, cmp=_repo_group_compare)
1314 for repo_group_id in sorted_repo_group_ids:
1333 for repo_group_id in sorted_repo_group_ids:
1315 self.fixture.destroy_repo_group(repo_group_id)
1334 self.fixture.destroy_repo_group(repo_group_id)
1316
1335
1317 def _cleanup_repos(self):
1336 def _cleanup_repos(self):
1318 sorted_repos_ids = sorted(self.repos_ids)
1337 sorted_repos_ids = sorted(self.repos_ids)
1319 for repo_id in sorted_repos_ids:
1338 for repo_id in sorted_repos_ids:
1320 self.fixture.destroy_repo(repo_id)
1339 self.fixture.destroy_repo(repo_id)
1321
1340
1322 def _cleanup_user_groups(self):
1341 def _cleanup_user_groups(self):
1323 def _user_group_compare(first_group_id, second_group_id):
1342 def _user_group_compare(first_group_id, second_group_id):
1324 """
1343 """
1325 Gives higher priority to the groups with the most complex paths
1344 Gives higher priority to the groups with the most complex paths
1326 """
1345 """
1327 first_group = UserGroup.get(first_group_id)
1346 first_group = UserGroup.get(first_group_id)
1328 second_group = UserGroup.get(second_group_id)
1347 second_group = UserGroup.get(second_group_id)
1329 first_group_parts = (
1348 first_group_parts = (
1330 len(first_group.users_group_name.split('/'))
1349 len(first_group.users_group_name.split('/'))
1331 if first_group else 0)
1350 if first_group else 0)
1332 second_group_parts = (
1351 second_group_parts = (
1333 len(second_group.users_group_name.split('/'))
1352 len(second_group.users_group_name.split('/'))
1334 if second_group else 0)
1353 if second_group else 0)
1335 return cmp(second_group_parts, first_group_parts)
1354 return cmp(second_group_parts, first_group_parts)
1336
1355
1337 sorted_user_group_ids = sorted(
1356 sorted_user_group_ids = sorted(
1338 self.user_group_ids, cmp=_user_group_compare)
1357 self.user_group_ids, cmp=_user_group_compare)
1339 for user_group_id in sorted_user_group_ids:
1358 for user_group_id in sorted_user_group_ids:
1340 self.fixture.destroy_user_group(user_group_id)
1359 self.fixture.destroy_user_group(user_group_id)
1341
1360
1342 def _cleanup_users(self):
1361 def _cleanup_users(self):
1343 for user_id in self.user_ids:
1362 for user_id in self.user_ids:
1344 self.fixture.destroy_user(user_id)
1363 self.fixture.destroy_user(user_id)
1345
1364
1346
1365
1347 # TODO: Think about moving this into a pytest-pyro package and make it a
1366 # TODO: Think about moving this into a pytest-pyro package and make it a
1348 # pytest plugin
1367 # pytest plugin
1349 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1368 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1350 def pytest_runtest_makereport(item, call):
1369 def pytest_runtest_makereport(item, call):
1351 """
1370 """
1352 Adds the remote traceback if the exception has this information.
1371 Adds the remote traceback if the exception has this information.
1353
1372
1354 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1373 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1355 to the exception instance.
1374 to the exception instance.
1356 """
1375 """
1357 outcome = yield
1376 outcome = yield
1358 report = outcome.get_result()
1377 report = outcome.get_result()
1359 if call.excinfo:
1378 if call.excinfo:
1360 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1379 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1361
1380
1362
1381
1363 def _add_vcsserver_remote_traceback(report, exc):
1382 def _add_vcsserver_remote_traceback(report, exc):
1364 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1383 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1365
1384
1366 if vcsserver_traceback:
1385 if vcsserver_traceback:
1367 section = 'VCSServer remote traceback ' + report.when
1386 section = 'VCSServer remote traceback ' + report.when
1368 report.sections.append((section, vcsserver_traceback))
1387 report.sections.append((section, vcsserver_traceback))
1369
1388
1370
1389
1371 @pytest.fixture(scope='session')
1390 @pytest.fixture(scope='session')
1372 def testrun():
1391 def testrun():
1373 return {
1392 return {
1374 'uuid': uuid.uuid4(),
1393 'uuid': uuid.uuid4(),
1375 'start': datetime.datetime.utcnow().isoformat(),
1394 'start': datetime.datetime.utcnow().isoformat(),
1376 'timestamp': int(time.time()),
1395 'timestamp': int(time.time()),
1377 }
1396 }
1378
1397
1379
1398
1380 @pytest.fixture(autouse=True)
1399 @pytest.fixture(autouse=True)
1381 def collect_appenlight_stats(request, testrun):
1400 def collect_appenlight_stats(request, testrun):
1382 """
1401 """
1383 This fixture reports memory consumption of single tests.
1402 This fixture reports memory consumption of single tests.
1384
1403
1385 It gathers data based on `psutil` and sends them to Appenlight. The option
1404 It gathers data based on `psutil` and sends them to Appenlight. The option
1386 ``--ae`` has to be used to enable this fixture and the API key for your
1405 ``--ae`` has to be used to enable this fixture and the API key for your
1387 application has to be provided in ``--ae-key``.
1406 application has to be provided in ``--ae-key``.
1388 """
1407 """
1389 try:
1408 try:
1390 # cygwin does not have psutil support yet.
1409 # cygwin does not have psutil support yet.
1391 import psutil
1410 import psutil
1392 except ImportError:
1411 except ImportError:
1393 return
1412 return
1394
1413
1395 if not request.config.getoption('--appenlight'):
1414 if not request.config.getoption('--appenlight'):
1396 return
1415 return
1397 else:
1416 else:
1398 # Only request the baseapp fixture if appenlight tracking is
1417 # Only request the baseapp fixture if appenlight tracking is
1399 # enabled. This will speed up a test run of unit tests by 2 to 3
1418 # enabled. This will speed up a test run of unit tests by 2 to 3
1400 # seconds if appenlight is not enabled.
1419 # seconds if appenlight is not enabled.
1401 baseapp = request.getfuncargvalue("baseapp")
1420 baseapp = request.getfuncargvalue("baseapp")
1402 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1421 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1403 client = AppenlightClient(
1422 client = AppenlightClient(
1404 url=url,
1423 url=url,
1405 api_key=request.config.getoption('--appenlight-api-key'),
1424 api_key=request.config.getoption('--appenlight-api-key'),
1406 namespace=request.node.nodeid,
1425 namespace=request.node.nodeid,
1407 request=str(testrun['uuid']),
1426 request=str(testrun['uuid']),
1408 testrun=testrun)
1427 testrun=testrun)
1409
1428
1410 client.collect({
1429 client.collect({
1411 'message': "Starting",
1430 'message': "Starting",
1412 })
1431 })
1413
1432
1414 server_and_port = baseapp.config.get_settings()['vcs.server']
1433 server_and_port = baseapp.config.get_settings()['vcs.server']
1415 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1434 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1416 server = create_vcsserver_proxy(server_and_port, protocol)
1435 server = create_vcsserver_proxy(server_and_port, protocol)
1417 with server:
1436 with server:
1418 vcs_pid = server.get_pid()
1437 vcs_pid = server.get_pid()
1419 server.run_gc()
1438 server.run_gc()
1420 vcs_process = psutil.Process(vcs_pid)
1439 vcs_process = psutil.Process(vcs_pid)
1421 mem = vcs_process.memory_info()
1440 mem = vcs_process.memory_info()
1422 client.tag_before('vcsserver.rss', mem.rss)
1441 client.tag_before('vcsserver.rss', mem.rss)
1423 client.tag_before('vcsserver.vms', mem.vms)
1442 client.tag_before('vcsserver.vms', mem.vms)
1424
1443
1425 test_process = psutil.Process()
1444 test_process = psutil.Process()
1426 mem = test_process.memory_info()
1445 mem = test_process.memory_info()
1427 client.tag_before('test.rss', mem.rss)
1446 client.tag_before('test.rss', mem.rss)
1428 client.tag_before('test.vms', mem.vms)
1447 client.tag_before('test.vms', mem.vms)
1429
1448
1430 client.tag_before('time', time.time())
1449 client.tag_before('time', time.time())
1431
1450
1432 @request.addfinalizer
1451 @request.addfinalizer
1433 def send_stats():
1452 def send_stats():
1434 client.tag_after('time', time.time())
1453 client.tag_after('time', time.time())
1435 with server:
1454 with server:
1436 gc_stats = server.run_gc()
1455 gc_stats = server.run_gc()
1437 for tag, value in gc_stats.items():
1456 for tag, value in gc_stats.items():
1438 client.tag_after(tag, value)
1457 client.tag_after(tag, value)
1439 mem = vcs_process.memory_info()
1458 mem = vcs_process.memory_info()
1440 client.tag_after('vcsserver.rss', mem.rss)
1459 client.tag_after('vcsserver.rss', mem.rss)
1441 client.tag_after('vcsserver.vms', mem.vms)
1460 client.tag_after('vcsserver.vms', mem.vms)
1442
1461
1443 mem = test_process.memory_info()
1462 mem = test_process.memory_info()
1444 client.tag_after('test.rss', mem.rss)
1463 client.tag_after('test.rss', mem.rss)
1445 client.tag_after('test.vms', mem.vms)
1464 client.tag_after('test.vms', mem.vms)
1446
1465
1447 client.collect({
1466 client.collect({
1448 'message': "Finished",
1467 'message': "Finished",
1449 })
1468 })
1450 client.send_stats()
1469 client.send_stats()
1451
1470
1452 return client
1471 return client
1453
1472
1454
1473
1455 class AppenlightClient():
1474 class AppenlightClient():
1456
1475
1457 url_template = '{url}?protocol_version=0.5'
1476 url_template = '{url}?protocol_version=0.5'
1458
1477
1459 def __init__(
1478 def __init__(
1460 self, url, api_key, add_server=True, add_timestamp=True,
1479 self, url, api_key, add_server=True, add_timestamp=True,
1461 namespace=None, request=None, testrun=None):
1480 namespace=None, request=None, testrun=None):
1462 self.url = self.url_template.format(url=url)
1481 self.url = self.url_template.format(url=url)
1463 self.api_key = api_key
1482 self.api_key = api_key
1464 self.add_server = add_server
1483 self.add_server = add_server
1465 self.add_timestamp = add_timestamp
1484 self.add_timestamp = add_timestamp
1466 self.namespace = namespace
1485 self.namespace = namespace
1467 self.request = request
1486 self.request = request
1468 self.server = socket.getfqdn(socket.gethostname())
1487 self.server = socket.getfqdn(socket.gethostname())
1469 self.tags_before = {}
1488 self.tags_before = {}
1470 self.tags_after = {}
1489 self.tags_after = {}
1471 self.stats = []
1490 self.stats = []
1472 self.testrun = testrun or {}
1491 self.testrun = testrun or {}
1473
1492
1474 def tag_before(self, tag, value):
1493 def tag_before(self, tag, value):
1475 self.tags_before[tag] = value
1494 self.tags_before[tag] = value
1476
1495
1477 def tag_after(self, tag, value):
1496 def tag_after(self, tag, value):
1478 self.tags_after[tag] = value
1497 self.tags_after[tag] = value
1479
1498
1480 def collect(self, data):
1499 def collect(self, data):
1481 if self.add_server:
1500 if self.add_server:
1482 data.setdefault('server', self.server)
1501 data.setdefault('server', self.server)
1483 if self.add_timestamp:
1502 if self.add_timestamp:
1484 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1503 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1485 if self.namespace:
1504 if self.namespace:
1486 data.setdefault('namespace', self.namespace)
1505 data.setdefault('namespace', self.namespace)
1487 if self.request:
1506 if self.request:
1488 data.setdefault('request', self.request)
1507 data.setdefault('request', self.request)
1489 self.stats.append(data)
1508 self.stats.append(data)
1490
1509
1491 def send_stats(self):
1510 def send_stats(self):
1492 tags = [
1511 tags = [
1493 ('testrun', self.request),
1512 ('testrun', self.request),
1494 ('testrun.start', self.testrun['start']),
1513 ('testrun.start', self.testrun['start']),
1495 ('testrun.timestamp', self.testrun['timestamp']),
1514 ('testrun.timestamp', self.testrun['timestamp']),
1496 ('test', self.namespace),
1515 ('test', self.namespace),
1497 ]
1516 ]
1498 for key, value in self.tags_before.items():
1517 for key, value in self.tags_before.items():
1499 tags.append((key + '.before', value))
1518 tags.append((key + '.before', value))
1500 try:
1519 try:
1501 delta = self.tags_after[key] - value
1520 delta = self.tags_after[key] - value
1502 tags.append((key + '.delta', delta))
1521 tags.append((key + '.delta', delta))
1503 except Exception:
1522 except Exception:
1504 pass
1523 pass
1505 for key, value in self.tags_after.items():
1524 for key, value in self.tags_after.items():
1506 tags.append((key + '.after', value))
1525 tags.append((key + '.after', value))
1507 self.collect({
1526 self.collect({
1508 'message': "Collected tags",
1527 'message': "Collected tags",
1509 'tags': tags,
1528 'tags': tags,
1510 })
1529 })
1511
1530
1512 response = requests.post(
1531 response = requests.post(
1513 self.url,
1532 self.url,
1514 headers={
1533 headers={
1515 'X-appenlight-api-key': self.api_key},
1534 'X-appenlight-api-key': self.api_key},
1516 json=self.stats,
1535 json=self.stats,
1517 )
1536 )
1518
1537
1519 if not response.status_code == 200:
1538 if not response.status_code == 200:
1520 pprint.pprint(self.stats)
1539 pprint.pprint(self.stats)
1521 print(response.headers)
1540 print(response.headers)
1522 print(response.text)
1541 print(response.text)
1523 raise Exception('Sending to appenlight failed')
1542 raise Exception('Sending to appenlight failed')
1524
1543
1525
1544
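A minimal usage sketch of `AppenlightClient` on its own; the endpoint URL and API key are placeholder assumptions, not values used by the test suite:

    import time

    client = AppenlightClient(
        url='http://localhost:6543/api/logs',   # assumed Appenlight endpoint
        api_key='PLACEHOLDER_API_KEY',          # assumed API key
        namespace='tests/test_example.py::test_foo')
    client.tag_before('time', time.time())
    # ... code under measurement ...
    client.tag_after('time', time.time())
    client.collect({'message': 'Finished'})
    client.send_stats()  # posts the collected stats; raises if the response is not 200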
1526 @pytest.fixture
1545 @pytest.fixture
1527 def gist_util(request, db_connection):
1546 def gist_util(request, db_connection):
1528 """
1547 """
1529 Provides a wired instance of `GistUtility` with integrated cleanup.
1548 Provides a wired instance of `GistUtility` with integrated cleanup.
1530 """
1549 """
1531 utility = GistUtility()
1550 utility = GistUtility()
1532 request.addfinalizer(utility.cleanup)
1551 request.addfinalizer(utility.cleanup)
1533 return utility
1552 return utility
1534
1553
1535
1554
1536 class GistUtility(object):
1555 class GistUtility(object):
1537 def __init__(self):
1556 def __init__(self):
1538 self.fixture = Fixture()
1557 self.fixture = Fixture()
1539 self.gist_ids = []
1558 self.gist_ids = []
1540
1559
1541 def create_gist(self, **kwargs):
1560 def create_gist(self, **kwargs):
1542 gist = self.fixture.create_gist(**kwargs)
1561 gist = self.fixture.create_gist(**kwargs)
1543 self.gist_ids.append(gist.gist_id)
1562 self.gist_ids.append(gist.gist_id)
1544 return gist
1563 return gist
1545
1564
1546 def cleanup(self):
1565 def cleanup(self):
1547 for id_ in self.gist_ids:
1566 for id_ in self.gist_ids:
1548 self.fixture.destroy_gists(str(id_))
1567 self.fixture.destroy_gists(str(id_))
1549
1568
1550
1569
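A hedged sketch of how a test could use the `gist_util` fixture; the assertion only relies on what `create_gist` and `cleanup` are shown to do above:

    def test_gist_example(gist_util):
        gist = gist_util.create_gist()  # kwargs are forwarded to Fixture.create_gist
        assert gist.gist_id in gist_util.gist_ids
        # the finalizer registered by the fixture destroys the gist afterwards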
1551 @pytest.fixture
1570 @pytest.fixture
1552 def enabled_backends(request):
1571 def enabled_backends(request):
1553 backends = request.config.option.backends
1572 backends = request.config.option.backends
1554 return backends[:]
1573 return backends[:]
1555
1574
1556
1575
1557 @pytest.fixture
1576 @pytest.fixture
1558 def settings_util(request, db_connection):
1577 def settings_util(request, db_connection):
1559 """
1578 """
1560 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1579 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1561 """
1580 """
1562 utility = SettingsUtility()
1581 utility = SettingsUtility()
1563 request.addfinalizer(utility.cleanup)
1582 request.addfinalizer(utility.cleanup)
1564 return utility
1583 return utility
1565
1584
1566
1585
1567 class SettingsUtility(object):
1586 class SettingsUtility(object):
1568 def __init__(self):
1587 def __init__(self):
1569 self.rhodecode_ui_ids = []
1588 self.rhodecode_ui_ids = []
1570 self.rhodecode_setting_ids = []
1589 self.rhodecode_setting_ids = []
1571 self.repo_rhodecode_ui_ids = []
1590 self.repo_rhodecode_ui_ids = []
1572 self.repo_rhodecode_setting_ids = []
1591 self.repo_rhodecode_setting_ids = []
1573
1592
1574 def create_repo_rhodecode_ui(
1593 def create_repo_rhodecode_ui(
1575 self, repo, section, value, key=None, active=True, cleanup=True):
1594 self, repo, section, value, key=None, active=True, cleanup=True):
1576 key = key or hashlib.sha1(
1595 key = key or hashlib.sha1(
1577 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1596 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1578
1597
1579 setting = RepoRhodeCodeUi()
1598 setting = RepoRhodeCodeUi()
1580 setting.repository_id = repo.repo_id
1599 setting.repository_id = repo.repo_id
1581 setting.ui_section = section
1600 setting.ui_section = section
1582 setting.ui_value = value
1601 setting.ui_value = value
1583 setting.ui_key = key
1602 setting.ui_key = key
1584 setting.ui_active = active
1603 setting.ui_active = active
1585 Session().add(setting)
1604 Session().add(setting)
1586 Session().commit()
1605 Session().commit()
1587
1606
1588 if cleanup:
1607 if cleanup:
1589 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1608 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1590 return setting
1609 return setting
1591
1610
1592 def create_rhodecode_ui(
1611 def create_rhodecode_ui(
1593 self, section, value, key=None, active=True, cleanup=True):
1612 self, section, value, key=None, active=True, cleanup=True):
1594 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1613 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1595
1614
1596 setting = RhodeCodeUi()
1615 setting = RhodeCodeUi()
1597 setting.ui_section = section
1616 setting.ui_section = section
1598 setting.ui_value = value
1617 setting.ui_value = value
1599 setting.ui_key = key
1618 setting.ui_key = key
1600 setting.ui_active = active
1619 setting.ui_active = active
1601 Session().add(setting)
1620 Session().add(setting)
1602 Session().commit()
1621 Session().commit()
1603
1622
1604 if cleanup:
1623 if cleanup:
1605 self.rhodecode_ui_ids.append(setting.ui_id)
1624 self.rhodecode_ui_ids.append(setting.ui_id)
1606 return setting
1625 return setting
1607
1626
1608 def create_repo_rhodecode_setting(
1627 def create_repo_rhodecode_setting(
1609 self, repo, name, value, type_, cleanup=True):
1628 self, repo, name, value, type_, cleanup=True):
1610 setting = RepoRhodeCodeSetting(
1629 setting = RepoRhodeCodeSetting(
1611 repo.repo_id, key=name, val=value, type=type_)
1630 repo.repo_id, key=name, val=value, type=type_)
1612 Session().add(setting)
1631 Session().add(setting)
1613 Session().commit()
1632 Session().commit()
1614
1633
1615 if cleanup:
1634 if cleanup:
1616 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1635 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1617 return setting
1636 return setting
1618
1637
1619 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1638 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1620 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1639 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1621 Session().add(setting)
1640 Session().add(setting)
1622 Session().commit()
1641 Session().commit()
1623
1642
1624 if cleanup:
1643 if cleanup:
1625 self.rhodecode_setting_ids.append(setting.app_settings_id)
1644 self.rhodecode_setting_ids.append(setting.app_settings_id)
1626
1645
1627 return setting
1646 return setting
1628
1647
1629 def cleanup(self):
1648 def cleanup(self):
1630 for id_ in self.rhodecode_ui_ids:
1649 for id_ in self.rhodecode_ui_ids:
1631 setting = RhodeCodeUi.get(id_)
1650 setting = RhodeCodeUi.get(id_)
1632 Session().delete(setting)
1651 Session().delete(setting)
1633
1652
1634 for id_ in self.rhodecode_setting_ids:
1653 for id_ in self.rhodecode_setting_ids:
1635 setting = RhodeCodeSetting.get(id_)
1654 setting = RhodeCodeSetting.get(id_)
1636 Session().delete(setting)
1655 Session().delete(setting)
1637
1656
1638 for id_ in self.repo_rhodecode_ui_ids:
1657 for id_ in self.repo_rhodecode_ui_ids:
1639 setting = RepoRhodeCodeUi.get(id_)
1658 setting = RepoRhodeCodeUi.get(id_)
1640 Session().delete(setting)
1659 Session().delete(setting)
1641
1660
1642 for id_ in self.repo_rhodecode_setting_ids:
1661 for id_ in self.repo_rhodecode_setting_ids:
1643 setting = RepoRhodeCodeSetting.get(id_)
1662 setting = RepoRhodeCodeSetting.get(id_)
1644 Session().delete(setting)
1663 Session().delete(setting)
1645
1664
1646 Session().commit()
1665 Session().commit()
1647
1666
1648
1667
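A short usage sketch for `settings_util`; the ui section and value are placeholders rather than settings the suite actually defines:

    def test_settings_example(settings_util):
        ui = settings_util.create_rhodecode_ui('hooks', 'python:example.hook')
        assert ui.ui_active
        # settings_util.cleanup() deletes the created RhodeCodeUi row again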
1649 @pytest.fixture
1668 @pytest.fixture
1650 def no_notifications(request):
1669 def no_notifications(request):
1651 notification_patcher = mock.patch(
1670 notification_patcher = mock.patch(
1652 'rhodecode.model.notification.NotificationModel.create')
1671 'rhodecode.model.notification.NotificationModel.create')
1653 notification_patcher.start()
1672 notification_patcher.start()
1654 request.addfinalizer(notification_patcher.stop)
1673 request.addfinalizer(notification_patcher.stop)
1655
1674
1656
1675
1657 @pytest.fixture(scope='session')
1676 @pytest.fixture(scope='session')
1658 def repeat(request):
1677 def repeat(request):
1659 """
1678 """
1660 The number of repetitions is based on this fixture.
1679 The number of repetitions is based on this fixture.
1661
1680
1662 Slower calls may divide it by 10 or 100. It is chosen so that the
1681 Slower calls may divide it by 10 or 100. It is chosen so that the
1663 tests are not too slow in our default test suite.
1682 tests are not too slow in our default test suite.
1664 """
1683 """
1665 return request.config.getoption('--repeat')
1684 return request.config.getoption('--repeat')
1666
1685
1667
1686
1668 @pytest.fixture
1687 @pytest.fixture
1669 def rhodecode_fixtures():
1688 def rhodecode_fixtures():
1670 return Fixture()
1689 return Fixture()
1671
1690
1672
1691
1673 @pytest.fixture
1692 @pytest.fixture
1674 def context_stub():
1693 def context_stub():
1675 """
1694 """
1676 Stub context object.
1695 Stub context object.
1677 """
1696 """
1678 context = pyramid.testing.DummyResource()
1697 context = pyramid.testing.DummyResource()
1679 return context
1698 return context
1680
1699
1681
1700
1682 @pytest.fixture
1701 @pytest.fixture
1683 def request_stub():
1702 def request_stub():
1684 """
1703 """
1685 Stub request object.
1704 Stub request object.
1686 """
1705 """
1687 from rhodecode.lib.base import bootstrap_request
1706 from rhodecode.lib.base import bootstrap_request
1688 request = bootstrap_request(scheme='https')
1707 request = bootstrap_request(scheme='https')
1689 return request
1708 return request
1690
1709
1691
1710
1692 @pytest.fixture
1711 @pytest.fixture
1693 def config_stub(request, request_stub):
1712 def config_stub(request, request_stub):
1694 """
1713 """
1695 Set up pyramid.testing and return the Configurator.
1714 Set up pyramid.testing and return the Configurator.
1696 """
1715 """
1697 from rhodecode.lib.base import bootstrap_config
1716 from rhodecode.lib.base import bootstrap_config
1698 config = bootstrap_config(request=request_stub)
1717 config = bootstrap_config(request=request_stub)
1699
1718
1700 @request.addfinalizer
1719 @request.addfinalizer
1701 def cleanup():
1720 def cleanup():
1702 pyramid.testing.tearDown()
1721 pyramid.testing.tearDown()
1703
1722
1704 return config
1723 return config
1705
1724
1706
1725
1707 @pytest.fixture
1726 @pytest.fixture
1708 def StubIntegrationType():
1727 def StubIntegrationType():
1709 class _StubIntegrationType(IntegrationTypeBase):
1728 class _StubIntegrationType(IntegrationTypeBase):
1710 """ Test integration type class """
1729 """ Test integration type class """
1711
1730
1712 key = 'test'
1731 key = 'test'
1713 display_name = 'Test integration type'
1732 display_name = 'Test integration type'
1714 description = 'A test integration type for testing'
1733 description = 'A test integration type for testing'
1715
1734
1716 @classmethod
1735 @classmethod
1717 def icon(cls):
1736 def icon(cls):
1718 return 'test_icon_html_image'
1737 return 'test_icon_html_image'
1719
1738
1720 def __init__(self, settings):
1739 def __init__(self, settings):
1721 super(_StubIntegrationType, self).__init__(settings)
1740 super(_StubIntegrationType, self).__init__(settings)
1722 self.sent_events = [] # for testing
1741 self.sent_events = [] # for testing
1723
1742
1724 def send_event(self, event):
1743 def send_event(self, event):
1725 self.sent_events.append(event)
1744 self.sent_events.append(event)
1726
1745
1727 def settings_schema(self):
1746 def settings_schema(self):
1728 class SettingsSchema(colander.Schema):
1747 class SettingsSchema(colander.Schema):
1729 test_string_field = colander.SchemaNode(
1748 test_string_field = colander.SchemaNode(
1730 colander.String(),
1749 colander.String(),
1731 missing=colander.required,
1750 missing=colander.required,
1732 title='test string field',
1751 title='test string field',
1733 )
1752 )
1734 test_int_field = colander.SchemaNode(
1753 test_int_field = colander.SchemaNode(
1735 colander.Int(),
1754 colander.Int(),
1736 title='some integer setting',
1755 title='some integer setting',
1737 )
1756 )
1738 return SettingsSchema()
1757 return SettingsSchema()
1739
1758
1740
1759
1741 integration_type_registry.register_integration_type(_StubIntegrationType)
1760 integration_type_registry.register_integration_type(_StubIntegrationType)
1742 return _StubIntegrationType
1761 return _StubIntegrationType
1743
1762
1744 @pytest.fixture
1763 @pytest.fixture
1745 def stub_integration_settings():
1764 def stub_integration_settings():
1746 return {
1765 return {
1747 'test_string_field': 'some data',
1766 'test_string_field': 'some data',
1748 'test_int_field': 100,
1767 'test_int_field': 100,
1749 }
1768 }
1750
1769
1751
1770
1752 @pytest.fixture
1771 @pytest.fixture
1753 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1772 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1754 stub_integration_settings):
1773 stub_integration_settings):
1755 integration = IntegrationModel().create(
1774 integration = IntegrationModel().create(
1756 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1775 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1757 name='test repo integration',
1776 name='test repo integration',
1758 repo=repo_stub, repo_group=None, child_repos_only=None)
1777 repo=repo_stub, repo_group=None, child_repos_only=None)
1759
1778
1760 @request.addfinalizer
1779 @request.addfinalizer
1761 def cleanup():
1780 def cleanup():
1762 IntegrationModel().delete(integration)
1781 IntegrationModel().delete(integration)
1763
1782
1764 return integration
1783 return integration
1765
1784
1766
1785
1767 @pytest.fixture
1786 @pytest.fixture
1768 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1787 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1769 stub_integration_settings):
1788 stub_integration_settings):
1770 integration = IntegrationModel().create(
1789 integration = IntegrationModel().create(
1771 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1790 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1772 name='test repogroup integration',
1791 name='test repogroup integration',
1773 repo=None, repo_group=test_repo_group, child_repos_only=True)
1792 repo=None, repo_group=test_repo_group, child_repos_only=True)
1774
1793
1775 @request.addfinalizer
1794 @request.addfinalizer
1776 def cleanup():
1795 def cleanup():
1777 IntegrationModel().delete(integration)
1796 IntegrationModel().delete(integration)
1778
1797
1779 return integration
1798 return integration
1780
1799
1781
1800
1782 @pytest.fixture
1801 @pytest.fixture
1783 def repogroup_recursive_integration_stub(request, test_repo_group,
1802 def repogroup_recursive_integration_stub(request, test_repo_group,
1784 StubIntegrationType, stub_integration_settings):
1803 StubIntegrationType, stub_integration_settings):
1785 integration = IntegrationModel().create(
1804 integration = IntegrationModel().create(
1786 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1805 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1787 name='test recursive repogroup integration',
1806 name='test recursive repogroup integration',
1788 repo=None, repo_group=test_repo_group, child_repos_only=False)
1807 repo=None, repo_group=test_repo_group, child_repos_only=False)
1789
1808
1790 @request.addfinalizer
1809 @request.addfinalizer
1791 def cleanup():
1810 def cleanup():
1792 IntegrationModel().delete(integration)
1811 IntegrationModel().delete(integration)
1793
1812
1794 return integration
1813 return integration
1795
1814
1796
1815
1797 @pytest.fixture
1816 @pytest.fixture
1798 def global_integration_stub(request, StubIntegrationType,
1817 def global_integration_stub(request, StubIntegrationType,
1799 stub_integration_settings):
1818 stub_integration_settings):
1800 integration = IntegrationModel().create(
1819 integration = IntegrationModel().create(
1801 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1820 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1802 name='test global integration',
1821 name='test global integration',
1803 repo=None, repo_group=None, child_repos_only=None)
1822 repo=None, repo_group=None, child_repos_only=None)
1804
1823
1805 @request.addfinalizer
1824 @request.addfinalizer
1806 def cleanup():
1825 def cleanup():
1807 IntegrationModel().delete(integration)
1826 IntegrationModel().delete(integration)
1808
1827
1809 return integration
1828 return integration
1810
1829
1811
1830
1812 @pytest.fixture
1831 @pytest.fixture
1813 def root_repos_integration_stub(request, StubIntegrationType,
1832 def root_repos_integration_stub(request, StubIntegrationType,
1814 stub_integration_settings):
1833 stub_integration_settings):
1815 integration = IntegrationModel().create(
1834 integration = IntegrationModel().create(
1816 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1835 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1817 name='test global integration',
1836 name='test global integration',
1818 repo=None, repo_group=None, child_repos_only=True)
1837 repo=None, repo_group=None, child_repos_only=True)
1819
1838
1820 @request.addfinalizer
1839 @request.addfinalizer
1821 def cleanup():
1840 def cleanup():
1822 IntegrationModel().delete(integration)
1841 IntegrationModel().delete(integration)
1823
1842
1824 return integration
1843 return integration
1825
1844
1826
1845
1827 @pytest.fixture
1846 @pytest.fixture
1828 def local_dt_to_utc():
1847 def local_dt_to_utc():
1829 def _factory(dt):
1848 def _factory(dt):
1830 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1849 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1831 dateutil.tz.tzutc()).replace(tzinfo=None)
1850 dateutil.tz.tzutc()).replace(tzinfo=None)
1832 return _factory
1851 return _factory
1833
1852
1834
1853
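An illustration of what the `local_dt_to_utc` factory is expected to return; the date is an arbitrary example:

    import datetime

    def test_local_dt_to_utc_example(local_dt_to_utc):
        naive_local = datetime.datetime(2018, 1, 1, 12, 0)
        as_utc = local_dt_to_utc(naive_local)
        assert as_utc.tzinfo is None  # converted to UTC, returned as a naive datetime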
1835 @pytest.fixture
1854 @pytest.fixture
1836 def disable_anonymous_user(request, baseapp):
1855 def disable_anonymous_user(request, baseapp):
1837 set_anonymous_access(False)
1856 set_anonymous_access(False)
1838
1857
1839 @request.addfinalizer
1858 @request.addfinalizer
1840 def cleanup():
1859 def cleanup():
1841 set_anonymous_access(True)
1860 set_anonymous_access(True)
1842
1861
1843
1862
1844 @pytest.fixture(scope='module')
1863 @pytest.fixture(scope='module')
1845 def rc_fixture(request):
1864 def rc_fixture(request):
1846 return Fixture()
1865 return Fixture()
1847
1866
1848
1867
1849 @pytest.fixture
1868 @pytest.fixture
1850 def repo_groups(request):
1869 def repo_groups(request):
1851 fixture = Fixture()
1870 fixture = Fixture()
1852
1871
1853 session = Session()
1872 session = Session()
1854 zombie_group = fixture.create_repo_group('zombie')
1873 zombie_group = fixture.create_repo_group('zombie')
1855 parent_group = fixture.create_repo_group('parent')
1874 parent_group = fixture.create_repo_group('parent')
1856 child_group = fixture.create_repo_group('parent/child')
1875 child_group = fixture.create_repo_group('parent/child')
1857 groups_in_db = session.query(RepoGroup).all()
1876 groups_in_db = session.query(RepoGroup).all()
1858 assert len(groups_in_db) == 3
1877 assert len(groups_in_db) == 3
1859 assert child_group.group_parent_id == parent_group.group_id
1878 assert child_group.group_parent_id == parent_group.group_id
1860
1879
1861 @request.addfinalizer
1880 @request.addfinalizer
1862 def cleanup():
1881 def cleanup():
1863 fixture.destroy_repo_group(zombie_group)
1882 fixture.destroy_repo_group(zombie_group)
1864 fixture.destroy_repo_group(child_group)
1883 fixture.destroy_repo_group(child_group)
1865 fixture.destroy_repo_group(parent_group)
1884 fixture.destroy_repo_group(parent_group)
1866
1885
1867 return zombie_group, parent_group, child_group
1886 return zombie_group, parent_group, child_group
@@ -1,458 +1,458 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import threading
21 import threading
22 import time
22 import time
23 import logging
23 import logging
24 import os.path
24 import os.path
25 import subprocess32
25 import subprocess32
26 import tempfile
26 import tempfile
27 import urllib2
27 import urllib2
28 from lxml.html import fromstring, tostring
28 from lxml.html import fromstring, tostring
29 from lxml.cssselect import CSSSelector
29 from lxml.cssselect import CSSSelector
30 from urlparse import urlparse, parse_qsl
30 from urlparse import urlparse, parse_qsl
31 from urllib import unquote_plus
31 from urllib import unquote_plus
32 import webob
32 import webob
33
33
34 from webtest.app import TestResponse, TestApp, string_types
34 from webtest.app import TestResponse, TestApp, string_types
35 from webtest.compat import print_stderr
35 from webtest.compat import print_stderr
36
36
37 import pytest
37 import pytest
38 import rc_testdata
38 import rc_testdata
39
39
40 from rhodecode.model.db import User, Repository
40 from rhodecode.model.db import User, Repository
41 from rhodecode.model.meta import Session
41 from rhodecode.model.meta import Session
42 from rhodecode.model.scm import ScmModel
42 from rhodecode.model.scm import ScmModel
43 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
43 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 from rhodecode.tests import login_user_session
45 from rhodecode.tests import login_user_session
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class CustomTestResponse(TestResponse):
50 class CustomTestResponse(TestResponse):
51 def _save_output(self, out):
51 def _save_output(self, out):
52 f = tempfile.NamedTemporaryFile(
52 f = tempfile.NamedTemporaryFile(
53 delete=False, prefix='rc-test-', suffix='.html')
53 delete=False, prefix='rc-test-', suffix='.html')
54 f.write(out)
54 f.write(out)
55 return f.name
55 return f.name
56
56
57 def mustcontain(self, *strings, **kw):
57 def mustcontain(self, *strings, **kw):
58 """
58 """
59 Assert that the response contains all of the strings passed
59 Assert that the response contains all of the strings passed
60 in as arguments.
60 in as arguments.
61
61
62 Equivalent to::
62 Equivalent to::
63
63
64 assert string in res
64 assert string in res
65 """
65 """
66 if 'no' in kw:
66 if 'no' in kw:
67 no = kw['no']
67 no = kw['no']
68 del kw['no']
68 del kw['no']
69 if isinstance(no, string_types):
69 if isinstance(no, string_types):
70 no = [no]
70 no = [no]
71 else:
71 else:
72 no = []
72 no = []
73 if kw:
73 if kw:
74 raise TypeError(
74 raise TypeError(
75 "The only keyword argument allowed is 'no' got %s" % kw)
75 "The only keyword argument allowed is 'no' got %s" % kw)
76
76
77 f = self._save_output(str(self))
77 f = self._save_output(str(self))
78
78
79 for s in strings:
79 for s in strings:
80 if not s in self:
80 if not s in self:
81 print_stderr("Actual response (no %r):" % s)
81 print_stderr("Actual response (no %r):" % s)
82 print_stderr(str(self))
82 print_stderr(str(self))
83 raise IndexError(
83 raise IndexError(
84 "Body does not contain string %r, output saved as %s" % (
84 "Body does not contain string %r, output saved as %s" % (
85 s, f))
85 s, f))
86
86
87 for no_s in no:
87 for no_s in no:
88 if no_s in self:
88 if no_s in self:
89 print_stderr("Actual response (has %r)" % no_s)
89 print_stderr("Actual response (has %r)" % no_s)
90 print_stderr(str(self))
90 print_stderr(str(self))
91 raise IndexError(
91 raise IndexError(
92 "Body contains bad string %r, output saved as %s" % (
92 "Body contains bad string %r, output saved as %s" % (
93 no_s, f))
93 no_s, f))
94
94
95 def assert_response(self):
95 def assert_response(self):
96 return AssertResponse(self)
96 return AssertResponse(self)
97
97
98 def get_session_from_response(self):
98 def get_session_from_response(self):
99 """
99 """
100 This returns the session from a response object.
100 This returns the session from a response object.
101 """
101 """
102
102
103 from pyramid_beaker import session_factory_from_settings
103 from pyramid_beaker import session_factory_from_settings
104 session = session_factory_from_settings(
104 session = session_factory_from_settings(
105 self.test_app.app.config.get_settings())
105 self.test_app.app.config.get_settings())
106 return session(self.request)
106 return session(self.request)
107
107
108
108
109 class TestRequest(webob.BaseRequest):
109 class TestRequest(webob.BaseRequest):
110
110
111 # for py.test
111 # for py.test
112 disabled = True
112 disabled = True
113 ResponseClass = CustomTestResponse
113 ResponseClass = CustomTestResponse
114
114
115 def add_response_callback(self, callback):
115 def add_response_callback(self, callback):
116 pass
116 pass
117
117
118
118
119 class CustomTestApp(TestApp):
119 class CustomTestApp(TestApp):
120 """
120 """
121 Custom app to make mustcontain more useful, and extract special methods
121 Custom app to make mustcontain more useful, and extract special methods
122 """
122 """
123 RequestClass = TestRequest
123 RequestClass = TestRequest
124 rc_login_data = {}
124 rc_login_data = {}
125 rc_current_session = None
125 rc_current_session = None
126
126
127 def login(self, username=None, password=None):
127 def login(self, username=None, password=None):
128 from rhodecode.lib import auth
128 from rhodecode.lib import auth
129
129
130 if username and password:
130 if username and password:
131 session = login_user_session(self, username, password)
131 session = login_user_session(self, username, password)
132 else:
132 else:
133 session = login_user_session(self)
133 session = login_user_session(self)
134
134
135 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
135 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
136 self.rc_current_session = session
136 self.rc_current_session = session
137 return session['rhodecode_user']
137 return session['rhodecode_user']
138
138
139 @property
139 @property
140 def csrf_token(self):
140 def csrf_token(self):
141 return self.rc_login_data['csrf_token']
141 return self.rc_login_data['csrf_token']
142
142
143
143
144 def set_anonymous_access(enabled):
144 def set_anonymous_access(enabled):
145 """(Dis)allows anonymous access depending on parameter `enabled`"""
145 """(Dis)allows anonymous access depending on parameter `enabled`"""
146 user = User.get_default_user()
146 user = User.get_default_user()
147 user.active = enabled
147 user.active = enabled
148 Session().add(user)
148 Session().add(user)
149 Session().commit()
149 Session().commit()
150 time.sleep(1.5) # must sleep for cache (1s to expire)
150 time.sleep(1.5) # must sleep for cache (1s to expire)
151 log.info('anonymous access is now: %s', enabled)
151 log.info('anonymous access is now: %s', enabled)
152 assert enabled == User.get_default_user().active, (
152 assert enabled == User.get_default_user().active, (
153 'Cannot set anonymous access')
153 'Cannot set anonymous access')
154
154
155
155
156 def check_xfail_backends(node, backend_alias):
156 def check_xfail_backends(node, backend_alias):
157 # Using "xfail_backends" here intentionally, since this marks work
157 # Using "xfail_backends" here intentionally, since this marks work
158 # which is "to be done" soon.
158 # which is "to be done" soon.
159 skip_marker = node.get_marker('xfail_backends')
159 skip_marker = node.get_closest_marker('xfail_backends')
160 if skip_marker and backend_alias in skip_marker.args:
160 if skip_marker and backend_alias in skip_marker.args:
161 msg = "Support for backend %s to be developed." % (backend_alias, )
161 msg = "Support for backend %s to be developed." % (backend_alias, )
162 msg = skip_marker.kwargs.get('reason', msg)
162 msg = skip_marker.kwargs.get('reason', msg)
163 pytest.xfail(msg)
163 pytest.xfail(msg)
164
164
165
165
166 def check_skip_backends(node, backend_alias):
166 def check_skip_backends(node, backend_alias):
167 # Using "skip_backends" here intentionally, since this marks work which is
167 # Using "skip_backends" here intentionally, since this marks work which is
168 # not supported.
168 # not supported.
169 skip_marker = node.get_marker('skip_backends')
169 skip_marker = node.get_closest_marker('skip_backends')
170 if skip_marker and backend_alias in skip_marker.args:
170 if skip_marker and backend_alias in skip_marker.args:
171 msg = "Feature not supported for backend %s." % (backend_alias, )
171 msg = "Feature not supported for backend %s." % (backend_alias, )
172 msg = skip_marker.kwargs.get('reason', msg)
172 msg = skip_marker.kwargs.get('reason', msg)
173 pytest.skip(msg)
173 pytest.skip(msg)
174
174
175
175
176 def extract_git_repo_from_dump(dump_name, repo_name):
176 def extract_git_repo_from_dump(dump_name, repo_name):
177 """Create git repo `repo_name` from dump `dump_name`."""
177 """Create git repo `repo_name` from dump `dump_name`."""
178 repos_path = ScmModel().repos_path
178 repos_path = ScmModel().repos_path
179 target_path = os.path.join(repos_path, repo_name)
179 target_path = os.path.join(repos_path, repo_name)
180 rc_testdata.extract_git_dump(dump_name, target_path)
180 rc_testdata.extract_git_dump(dump_name, target_path)
181 return target_path
181 return target_path
182
182
183
183
184 def extract_hg_repo_from_dump(dump_name, repo_name):
184 def extract_hg_repo_from_dump(dump_name, repo_name):
185 """Create hg repo `repo_name` from dump `dump_name`."""
185 """Create hg repo `repo_name` from dump `dump_name`."""
186 repos_path = ScmModel().repos_path
186 repos_path = ScmModel().repos_path
187 target_path = os.path.join(repos_path, repo_name)
187 target_path = os.path.join(repos_path, repo_name)
188 rc_testdata.extract_hg_dump(dump_name, target_path)
188 rc_testdata.extract_hg_dump(dump_name, target_path)
189 return target_path
189 return target_path
190
190
191
191
192 def extract_svn_repo_from_dump(dump_name, repo_name):
192 def extract_svn_repo_from_dump(dump_name, repo_name):
193 """Create a svn repo `repo_name` from dump `dump_name`."""
193 """Create a svn repo `repo_name` from dump `dump_name`."""
194 repos_path = ScmModel().repos_path
194 repos_path = ScmModel().repos_path
195 target_path = os.path.join(repos_path, repo_name)
195 target_path = os.path.join(repos_path, repo_name)
196 SubversionRepository(target_path, create=True)
196 SubversionRepository(target_path, create=True)
197 _load_svn_dump_into_repo(dump_name, target_path)
197 _load_svn_dump_into_repo(dump_name, target_path)
198 return target_path
198 return target_path
199
199
200
200
201 def assert_message_in_log(log_records, message, levelno, module):
201 def assert_message_in_log(log_records, message, levelno, module):
202 messages = [
202 messages = [
203 r.message for r in log_records
203 r.message for r in log_records
204 if r.module == module and r.levelno == levelno
204 if r.module == module and r.levelno == levelno
205 ]
205 ]
206 assert message in messages
206 assert message in messages
207
207
208
208
209 def _load_svn_dump_into_repo(dump_name, repo_path):
209 def _load_svn_dump_into_repo(dump_name, repo_path):
210 """
210 """
211 Utility to populate a svn repository with a named dump
211 Utility to populate a svn repository with a named dump
212
212
213 Currently the dumps are in rc_testdata. They might later on be
213 Currently the dumps are in rc_testdata. They might later on be
214 integrated with the main repository once they stabilize more.
214 integrated with the main repository once they stabilize more.
215 """
215 """
216 dump = rc_testdata.load_svn_dump(dump_name)
216 dump = rc_testdata.load_svn_dump(dump_name)
217 load_dump = subprocess32.Popen(
217 load_dump = subprocess32.Popen(
218 ['svnadmin', 'load', repo_path],
218 ['svnadmin', 'load', repo_path],
219 stdin=subprocess32.PIPE, stdout=subprocess32.PIPE,
219 stdin=subprocess32.PIPE, stdout=subprocess32.PIPE,
220 stderr=subprocess32.PIPE)
220 stderr=subprocess32.PIPE)
221 out, err = load_dump.communicate(dump)
221 out, err = load_dump.communicate(dump)
222 if load_dump.returncode != 0:
222 if load_dump.returncode != 0:
223 log.error("Output of load_dump command: %s", out)
223 log.error("Output of load_dump command: %s", out)
224 log.error("Error output of load_dump command: %s", err)
224 log.error("Error output of load_dump command: %s", err)
225 raise Exception(
225 raise Exception(
226 'Failed to load dump "%s" into repository at path "%s".'
226 'Failed to load dump "%s" into repository at path "%s".'
227 % (dump_name, repo_path))
227 % (dump_name, repo_path))
228
228
229
229
230 class AssertResponse(object):
230 class AssertResponse(object):
231 """
231 """
232 Utility that helps to assert things about a given HTML response.
232 Utility that helps to assert things about a given HTML response.
233 """
233 """
234
234
235 def __init__(self, response):
235 def __init__(self, response):
236 self.response = response
236 self.response = response
237
237
238 def get_imports(self):
238 def get_imports(self):
239 return fromstring, tostring, CSSSelector
239 return fromstring, tostring, CSSSelector
240
240
241 def one_element_exists(self, css_selector):
241 def one_element_exists(self, css_selector):
242 self.get_element(css_selector)
242 self.get_element(css_selector)
243
243
244 def no_element_exists(self, css_selector):
244 def no_element_exists(self, css_selector):
245 assert not self._get_elements(css_selector)
245 assert not self._get_elements(css_selector)
246
246
247 def element_equals_to(self, css_selector, expected_content):
247 def element_equals_to(self, css_selector, expected_content):
248 element = self.get_element(css_selector)
248 element = self.get_element(css_selector)
249 element_text = self._element_to_string(element)
249 element_text = self._element_to_string(element)
250 assert expected_content in element_text
250 assert expected_content in element_text
251
251
252 def element_contains(self, css_selector, expected_content):
252 def element_contains(self, css_selector, expected_content):
253 element = self.get_element(css_selector)
253 element = self.get_element(css_selector)
254 assert expected_content in element.text_content()
254 assert expected_content in element.text_content()
255
255
256 def element_value_contains(self, css_selector, expected_content):
256 def element_value_contains(self, css_selector, expected_content):
257 element = self.get_element(css_selector)
257 element = self.get_element(css_selector)
258 assert expected_content in element.value
258 assert expected_content in element.value
259
259
260 def contains_one_link(self, link_text, href):
260 def contains_one_link(self, link_text, href):
261 fromstring, tostring, CSSSelector = self.get_imports()
261 fromstring, tostring, CSSSelector = self.get_imports()
262 doc = fromstring(self.response.body)
262 doc = fromstring(self.response.body)
263 sel = CSSSelector('a[href]')
263 sel = CSSSelector('a[href]')
264 elements = [
264 elements = [
265 e for e in sel(doc) if e.text_content().strip() == link_text]
265 e for e in sel(doc) if e.text_content().strip() == link_text]
266 assert len(elements) == 1, "Did not find link or found multiple links"
266 assert len(elements) == 1, "Did not find link or found multiple links"
267 self._ensure_url_equal(elements[0].attrib.get('href'), href)
267 self._ensure_url_equal(elements[0].attrib.get('href'), href)
268
268
269 def contains_one_anchor(self, anchor_id):
269 def contains_one_anchor(self, anchor_id):
270 fromstring, tostring, CSSSelector = self.get_imports()
270 fromstring, tostring, CSSSelector = self.get_imports()
271 doc = fromstring(self.response.body)
271 doc = fromstring(self.response.body)
272 sel = CSSSelector('#' + anchor_id)
272 sel = CSSSelector('#' + anchor_id)
273 elements = sel(doc)
273 elements = sel(doc)
274 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
274 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
275
275
276 def _ensure_url_equal(self, found, expected):
276 def _ensure_url_equal(self, found, expected):
277 assert _Url(found) == _Url(expected)
277 assert _Url(found) == _Url(expected)
278
278
279 def get_element(self, css_selector):
279 def get_element(self, css_selector):
280 elements = self._get_elements(css_selector)
280 elements = self._get_elements(css_selector)
281 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
281 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
282 return elements[0]
282 return elements[0]
283
283
284 def get_elements(self, css_selector):
284 def get_elements(self, css_selector):
285 return self._get_elements(css_selector)
285 return self._get_elements(css_selector)
286
286
287 def _get_elements(self, css_selector):
287 def _get_elements(self, css_selector):
288 fromstring, tostring, CSSSelector = self.get_imports()
288 fromstring, tostring, CSSSelector = self.get_imports()
289 doc = fromstring(self.response.body)
289 doc = fromstring(self.response.body)
290 sel = CSSSelector(css_selector)
290 sel = CSSSelector(css_selector)
291 elements = sel(doc)
291 elements = sel(doc)
292 return elements
292 return elements
293
293
294 def _element_to_string(self, element):
294 def _element_to_string(self, element):
295 fromstring, tostring, CSSSelector = self.get_imports()
295 fromstring, tostring, CSSSelector = self.get_imports()
296 return tostring(element)
296 return tostring(element)
297
297
298
298
299 class _Url(object):
299 class _Url(object):
300 """
300 """
301 A url object that can be compared with other url objects
301 A url object that can be compared with other url objects
302 without regard to the vagaries of encoding, escaping, and ordering
302 without regard to the vagaries of encoding, escaping, and ordering
303 of parameters in query strings.
303 of parameters in query strings.
304
304
305 Inspired by
305 Inspired by
306 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
306 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
307 """
307 """
308
308
309 def __init__(self, url):
309 def __init__(self, url):
310 parts = urlparse(url)
310 parts = urlparse(url)
311 _query = frozenset(parse_qsl(parts.query))
311 _query = frozenset(parse_qsl(parts.query))
312 _path = unquote_plus(parts.path)
312 _path = unquote_plus(parts.path)
313 parts = parts._replace(query=_query, path=_path)
313 parts = parts._replace(query=_query, path=_path)
314 self.parts = parts
314 self.parts = parts
315
315
316 def __eq__(self, other):
316 def __eq__(self, other):
317 return self.parts == other.parts
317 return self.parts == other.parts
318
318
319 def __hash__(self):
319 def __hash__(self):
320 return hash(self.parts)
320 return hash(self.parts)
321
321
322
322
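For illustration, two comparisons that should hold under these semantics; the URLs are made-up examples:

    assert _Url('/repo?a=1&b=2') == _Url('/repo?b=2&a=1')      # query order is ignored
    assert _Url('/some%20path?x=1') == _Url('/some path?x=1')  # escaping is normalized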
323 def run_test_concurrently(times, raise_catched_exc=True):
323 def run_test_concurrently(times, raise_catched_exc=True):
324 """
324 """
325 Add this decorator to small pieces of code that you want to test
325 Add this decorator to small pieces of code that you want to test
326 concurrently
326 concurrently
327
327
328 ex:
328 ex:
329
329
330 @run_test_concurrently(25)
330 @run_test_concurrently(25)
331 def my_test_function():
331 def my_test_function():
332 ...
332 ...
333 """
333 """
334 def test_concurrently_decorator(test_func):
334 def test_concurrently_decorator(test_func):
335 def wrapper(*args, **kwargs):
335 def wrapper(*args, **kwargs):
336 exceptions = []
336 exceptions = []
337
337
338 def call_test_func():
338 def call_test_func():
339 try:
339 try:
340 test_func(*args, **kwargs)
340 test_func(*args, **kwargs)
341 except Exception as e:
341 except Exception as e:
342 exceptions.append(e)
342 exceptions.append(e)
343 if raise_catched_exc:
343 if raise_catched_exc:
344 raise
344 raise
345 threads = []
345 threads = []
346 for i in range(times):
346 for i in range(times):
347 threads.append(threading.Thread(target=call_test_func))
347 threads.append(threading.Thread(target=call_test_func))
348 for t in threads:
348 for t in threads:
349 t.start()
349 t.start()
350 for t in threads:
350 for t in threads:
351 t.join()
351 t.join()
352 if exceptions:
352 if exceptions:
353 raise Exception(
353 raise Exception(
354 'test_concurrently intercepted %s exceptions: %s' % (
354 'test_concurrently intercepted %s exceptions: %s' % (
355 len(exceptions), exceptions))
355 len(exceptions), exceptions))
356 return wrapper
356 return wrapper
357 return test_concurrently_decorator
357 return test_concurrently_decorator
358
358
359
359
360 def wait_for_url(url, timeout=10):
360 def wait_for_url(url, timeout=10):
361 """
361 """
362 Wait until URL becomes reachable.
362 Wait until URL becomes reachable.
363
363
364 It polls the URL until the timeout is reached or it becomes reachable.
364 It polls the URL until the timeout is reached or it becomes reachable.
365 It will call `pytest.fail` in case the URL is not reachable.
365 It will call `pytest.fail` in case the URL is not reachable.
366 """
366 """
367 timeout = time.time() + timeout
367 timeout = time.time() + timeout
368 last = 0
368 last = 0
369 wait = 0.1
369 wait = 0.1
370
370
371 while timeout > last:
371 while timeout > last:
372 last = time.time()
372 last = time.time()
373 if is_url_reachable(url):
373 if is_url_reachable(url):
374 break
374 break
375 elif (last + wait) > time.time():
375 elif (last + wait) > time.time():
376 # Go to sleep because not enough time has passed since last check.
376 # Go to sleep because not enough time has passed since last check.
377 time.sleep(wait)
377 time.sleep(wait)
378 else:
378 else:
379 pytest.fail("Timeout while waiting for URL {}".format(url))
379 pytest.fail("Timeout while waiting for URL {}".format(url))
380
380
381
381
382 def is_url_reachable(url):
382 def is_url_reachable(url):
383 try:
383 try:
384 urllib2.urlopen(url)
384 urllib2.urlopen(url)
385 except urllib2.URLError:
385 except urllib2.URLError:
386 return False
386 return False
387 return True
387 return True
388
388
389
389
390 def repo_on_filesystem(repo_name):
390 def repo_on_filesystem(repo_name):
391 from rhodecode.lib import vcs
391 from rhodecode.lib import vcs
392 from rhodecode.tests import TESTS_TMP_PATH
392 from rhodecode.tests import TESTS_TMP_PATH
393 repo = vcs.get_vcs_instance(
393 repo = vcs.get_vcs_instance(
394 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
394 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
395 return repo is not None
395 return repo is not None
396
396
397
397
398 def commit_change(
398 def commit_change(
399 repo, filename, content, message, vcs_type, parent=None, newfile=False):
399 repo, filename, content, message, vcs_type, parent=None, newfile=False):
400 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
400 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
401
401
402 repo = Repository.get_by_repo_name(repo)
402 repo = Repository.get_by_repo_name(repo)
403 _commit = parent
403 _commit = parent
404 if not parent:
404 if not parent:
405 _commit = EmptyCommit(alias=vcs_type)
405 _commit = EmptyCommit(alias=vcs_type)
406
406
407 if newfile:
407 if newfile:
408 nodes = {
408 nodes = {
409 filename: {
409 filename: {
410 'content': content
410 'content': content
411 }
411 }
412 }
412 }
413 commit = ScmModel().create_nodes(
413 commit = ScmModel().create_nodes(
414 user=TEST_USER_ADMIN_LOGIN, repo=repo,
414 user=TEST_USER_ADMIN_LOGIN, repo=repo,
415 message=message,
415 message=message,
416 nodes=nodes,
416 nodes=nodes,
417 parent_commit=_commit,
417 parent_commit=_commit,
418 author=TEST_USER_ADMIN_LOGIN,
418 author=TEST_USER_ADMIN_LOGIN,
419 )
419 )
420 else:
420 else:
421 commit = ScmModel().commit_change(
421 commit = ScmModel().commit_change(
422 repo=repo.scm_instance(), repo_name=repo.repo_name,
422 repo=repo.scm_instance(), repo_name=repo.repo_name,
423 commit=parent, user=TEST_USER_ADMIN_LOGIN,
423 commit=parent, user=TEST_USER_ADMIN_LOGIN,
424 author=TEST_USER_ADMIN_LOGIN,
424 author=TEST_USER_ADMIN_LOGIN,
425 message=message,
425 message=message,
426 content=content,
426 content=content,
427 f_path=filename
427 f_path=filename
428 )
428 )
429 return commit
429 return commit
430
430
431
431
432 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
432 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
433 if not default:
433 if not default:
434 raise ValueError('Permission for default user must be given')
434 raise ValueError('Permission for default user must be given')
435 form_data = [(
435 form_data = [(
436 'csrf_token', csrf_token
436 'csrf_token', csrf_token
437 )]
437 )]
438 # add default
438 # add default
439 form_data.extend([
439 form_data.extend([
440 ('u_perm_1', default)
440 ('u_perm_1', default)
441 ])
441 ])
442
442
443 if grant:
443 if grant:
444 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
444 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
445 form_data.extend([
445 form_data.extend([
446 ('perm_new_member_perm_new{}'.format(cnt), perm),
446 ('perm_new_member_perm_new{}'.format(cnt), perm),
447 ('perm_new_member_id_new{}'.format(cnt), obj_id),
447 ('perm_new_member_id_new{}'.format(cnt), obj_id),
448 ('perm_new_member_name_new{}'.format(cnt), obj_name),
448 ('perm_new_member_name_new{}'.format(cnt), obj_name),
449 ('perm_new_member_type_new{}'.format(cnt), obj_type),
449 ('perm_new_member_type_new{}'.format(cnt), obj_type),
450
450
451 ])
451 ])
452 if revoke:
452 if revoke:
453 for obj_id, obj_type in revoke:
453 for obj_id, obj_type in revoke:
454 form_data.extend([
454 form_data.extend([
455 ('perm_del_member_id_{}'.format(obj_id), obj_id),
455 ('perm_del_member_id_{}'.format(obj_id), obj_id),
456 ('perm_del_member_type_{}'.format(obj_id), obj_type),
456 ('perm_del_member_type_{}'.format(obj_id), obj_type),
457 ])
457 ])
458 return form_data
458 return form_data
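A usage sketch of the helper above; the ids, names and permission strings are placeholders:

    form_data = permission_update_data_generator(
        csrf_token,
        default='repository.read',
        grant=[(2, 'repository.write', 'some-user', 'user')],
        revoke=[(3, 'user')])
    # form_data is a list of (field, value) tuples ready to be submitted as a form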