@@ -0,0 +1,80 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Test suite for making push/pull operations on specially modified INI files

.. important::

   You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
   to redirect things to stderr instead of stdout.
"""

import pytest
import requests

from rhodecode.tests import GIT_REPO, HG_REPO
from rhodecode.tests.vcs_operations import Command, _add_files_and_push


def check_connection():
    try:
        response = requests.get('http://httpbin.org')
        return response.status_code == 200
    except Exception as e:
        print(e)

    return False


connection_available = pytest.mark.skipif(
    not check_connection(), reason="No outside internet connection available")


@pytest.mark.usefixtures("baseapp", "enable_webhook_push_integration")
class TestVCSOperationsOnCustomIniConfig(object):

    def test_push_with_webhook_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)

        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        push_url = rc_web_server.repo_clone_url(HG_REPO)
        _add_files_and_push('hg', tmpdir.strpath, clone_url=push_url)

        rc_log = rc_web_server.get_rc_log()
        assert 'ERROR' not in rc_log
        assert "executing task TASK:<@task: rhodecode.integrations.types.webhook.post_to_webhook" in rc_log
        assert "handling event repo-push with integration <rhodecode.integrations.types.webhook.WebhookIntegrationType" in rc_log

    def test_push_with_webhook_git(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)

        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)

        push_url = rc_web_server.repo_clone_url(GIT_REPO)
        _add_files_and_push('git', tmpdir.strpath, clone_url=push_url)

        rc_log = rc_web_server.get_rc_log()
        assert 'ERROR' not in rc_log
        assert "executing task TASK:<@task: rhodecode.integrations.types.webhook.post_to_webhook" in rc_log
        assert "handling event repo-push with integration <rhodecode.integrations.types.webhook.WebhookIntegrationType" in rc_log
@@ -1,1092 +1,1092 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os

import mock
import pytest

from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
from rhodecode.apps.repository.views.repo_files import RepoFilesView
from rhodecode.lib import helpers as h
from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.ext_json import json
from rhodecode.lib.vcs import nodes

from rhodecode.lib.vcs.conf import settings
from rhodecode.tests import assert_session_flash
from rhodecode.tests.fixture import Fixture
from rhodecode.model.db import Session

fixture = Fixture()


def get_node_history(backend_type):
    return {
        'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
        'git': json.loads(fixture.load_resource('git_node_history_response.json')),
        'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
    }[backend_type]


def route_path(name, params=None, **kwargs):
    import urllib

    base_url = {
        'repo_summary': '/{repo_name}',
        'repo_archivefile': '/{repo_name}/archive/{fname}',
        'repo_files_diff': '/{repo_name}/diff/{f_path}',
        'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
        'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
        'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
        'repo_files:default_commit': '/{repo_name}/files',
        'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
        'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
        'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
        'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
        'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
        'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
        'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
        'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
        'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
        'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
        'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
        'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
        'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
        'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
        'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
        'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
    return base_url


def assert_files_in_response(response, files, params):
    template = (
        'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
    _assert_items_in_response(response, files, template, params)


def assert_dirs_in_response(response, dirs, params):
    template = (
        'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
    _assert_items_in_response(response, dirs, template, params)


def _assert_items_in_response(response, items, template, params):
    for item in items:
        item_params = {'name': item}
        item_params.update(params)
        response.mustcontain(template % item_params)


def assert_timeago_in_response(response, items, params):
    for item in items:
        response.mustcontain(h.age_component(params['date']))


@pytest.mark.usefixtures("app")
class TestFilesViews(object):

    def test_show_files(self, backend):
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'))
        commit = backend.repo.get_commit()

        params = {
            'repo_name': backend.repo_name,
            'commit_id': commit.raw_id,
            'date': commit.date
        }
        assert_dirs_in_response(response, ['docs', 'vcs'], params)
        files = [
            '.gitignore',
            '.hgignore',
            '.hgtags',
            # TODO: missing in Git
            # '.travis.yml',
            'MANIFEST.in',
            'README.rst',
            # TODO: File is missing in svn repository
            # 'run_test_and_report.sh',
            'setup.cfg',
            'setup.py',
            'test_and_report.sh',
            'tox.ini',
        ]
        assert_files_in_response(response, files, params)
        assert_timeago_in_response(response, files, params)

    def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
        repo = backend_hg['subrepos']
        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))
        assert_response = response.assert_response()
        assert_response.contains_one_link(
            'absolute-path @ 000000000000', 'http://example.com/absolute-path')

    def test_show_files_links_submodules_with_absolute_url_subpaths(
            self, backend_hg):
        repo = backend_hg['subrepos']
        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))
        assert_response = response.assert_response()
        assert_response.contains_one_link(
            'subpaths-path @ 000000000000',
            'http://sub-base.example.com/subpaths-path')

    @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
    def test_files_menu(self, backend):
        new_branch = "temp_branch_name"
        commits = [
            {'message': 'a'},
            {'message': 'b', 'branch': new_branch}
        ]
        backend.create_repo(commits)
        backend.repo.landing_rev = "branch:%s" % new_branch
        Session().commit()

        # get response based on tip and not new commit
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'))

        # make sure Files menu url is not tip but new commit
        landing_rev = backend.repo.landing_ref_name
        files_url = route_path('repo_files:default_path',
                               repo_name=backend.repo_name,
                               commit_id=landing_rev, params={'at': landing_rev})

        assert landing_rev != 'tip'
        response.mustcontain(
            '<li class="active"><a class="menulink" href="%s">' % files_url)

    def test_show_files_commit(self, backend):
        commit = backend.repo.get_commit(commit_idx=32)

        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'))

        dirs = ['docs', 'tests']
        files = ['README.rst']
        params = {
            'repo_name': backend.repo_name,
            'commit_id': commit.raw_id,
        }
        assert_dirs_in_response(response, dirs, params)
        assert_files_in_response(response, files, params)

    def test_show_files_different_branch(self, backend):
        branches = dict(
            hg=(150, ['git']),
            # TODO: Git test repository does not contain other branches
            git=(633, ['master']),
            # TODO: Branch support in Subversion
            svn=(150, [])
        )
        idx, branches = branches[backend.alias]
        commit = backend.repo.get_commit(commit_idx=idx)
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'))

        assert_response = response.assert_response()
        for branch in branches:
            assert_response.element_contains('.tags .branchtag', branch)

    def test_show_files_paging(self, backend):
        repo = backend.repo
        indexes = [73, 92, 109, 1, 0]
        idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
                   for rev in indexes]

        for idx in idx_map:
            response = self.app.get(
                route_path('repo_files',
                           repo_name=backend.repo_name,
                           commit_id=idx[1], f_path='/'))

            response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))

    def test_file_source(self, backend):
        commit = backend.repo.get_commit(commit_idx=167)
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='vcs/nodes.py'))

        msgbox = """<div class="commit">%s</div>"""
        response.mustcontain(msgbox % (commit.message, ))

        assert_response = response.assert_response()
        if commit.branch:
            assert_response.element_contains(
                '.tags.tags-main .branchtag', commit.branch)
        if commit.tags:
            for tag in commit.tags:
                assert_response.element_contains('.tags.tags-main .tagtag', tag)

    def test_file_source_annotated(self, backend):
        response = self.app.get(
            route_path('repo_files:annotated',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py'))
        expected_commits = {
            'hg': 'r356',
            'git': 'r345',
            'svn': 'r208',
        }
        response.mustcontain(expected_commits[backend.alias])

    def test_file_source_authors(self, backend):
        response = self.app.get(
            route_path('repo_file_authors',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py'))
        expected_authors = {
            'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'svn': ('marcin', 'lukasz'),
        }

        for author in expected_authors[backend.alias]:
            response.mustcontain(author)

    def test_file_source_authors_with_annotation(self, backend):
        response = self.app.get(
            route_path('repo_file_authors',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py',
                       params=dict(annotate=1)))
        expected_authors = {
            'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'svn': ('marcin', 'lukasz'),
        }

        for author in expected_authors[backend.alias]:
            response.mustcontain(author)

    def test_file_source_history(self, backend, xhr_header):
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py'),
            extra_environ=xhr_header)
        assert get_node_history(backend.alias) == json.loads(response.body)

    def test_file_source_history_svn(self, backend_svn, xhr_header):
        simple_repo = backend_svn['svn-simple-layout']
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=simple_repo.repo_name,
                       commit_id='tip', f_path='trunk/example.py'),
            extra_environ=xhr_header)

        expected_data = json.loads(
            fixture.load_resource('svn_node_history_branches.json'))

        assert expected_data == response.json

    def test_file_source_history_with_annotation(self, backend, xhr_header):
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py',
                       params=dict(annotate=1)),
            extra_environ=xhr_header)
        assert get_node_history(backend.alias) == json.loads(response.body)

    def test_tree_search_top_level(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']

    def test_tree_search_missing_xhr(self, backend):
        self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            status=404)

    def test_tree_search_at_path(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/docs'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        nodes = response.json['nodes']
        assert {'name': 'docs/api', 'type': 'dir'} in nodes
        assert {'name': 'docs/index.rst', 'type': 'file'} in nodes

    def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/docs/api'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        nodes = response.json['nodes']
        assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes

    def test_tree_search_at_path_missing_xhr(self, backend):
        self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/docs'),
            status=404)

    def test_nodetree(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'),
            extra_environ=xhr_header)

        assert_response = response.assert_response()

        for attr in ['data-commit-id', 'data-date', 'data-author']:
            elements = assert_response.get_elements('[{}]'.format(attr))
            assert len(elements) > 1

            for element in elements:
                assert element.get(attr)

    def test_nodetree_if_file(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='README.rst'),
            extra_environ=xhr_header)
        assert response.body == ''

    def test_nodetree_wrong_path(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/dont-exist'),
            extra_environ=xhr_header)

        err = 'error: There is no file nor ' \
              'directory at the given path'
        assert err in response.body

    def test_nodetree_missing_xhr(self, backend):
        self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            status=404)


@pytest.mark.usefixtures("app", "autologin_user")
class TestRawFileHandling(object):

    def test_download_file(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='vcs/nodes.py'),)

        assert response.content_disposition == 'attachment; filename="nodes.py"; filename*=UTF-8\'\'nodes.py'
        assert response.content_type == "text/x-python"

    def test_download_file_wrong_cs(self, backend):
        raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'

        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=raw_id, f_path='vcs/nodes.svg'),
            status=404)

        msg = """No such commit exists for this repository"""
        response.mustcontain(msg)

    def test_download_file_wrong_f_path(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        f_path = 'vcs/ERRORnodes.py'

        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path=f_path),
            status=404)

        msg = (
            "There is no file nor directory at the given path: "
            "`%s` at commit %s" % (f_path, commit.short_id))
        response.mustcontain(msg)

    def test_file_raw(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='vcs/nodes.py'),)

        assert response.content_type == "text/plain"

    def test_file_raw_binary(self, backend):
        commit = backend.repo.get_commit()
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id,
                       f_path='docs/theme/ADC/static/breadcrumb_background.png'),)

        assert response.content_disposition == 'inline'

    def test_raw_file_wrong_cs(self, backend):
        raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'

        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=raw_id, f_path='vcs/nodes.svg'),
            status=404)

        msg = """No such commit exists for this repository"""
        response.mustcontain(msg)

    def test_raw_wrong_f_path(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        f_path = 'vcs/ERRORnodes.py'
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path=f_path),
            status=404)

        msg = (
            "There is no file nor directory at the given path: "
            "`%s` at commit %s" % (f_path, commit.short_id))
        response.mustcontain(msg)

    def test_raw_svg_should_not_be_rendered(self, backend):
        backend.create_repo()
        backend.ensure_file("xss.svg")
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='xss.svg'),)
        # If the content type is image/svg+xml then it allows to render HTML
        # and malicious SVG.
        assert response.content_type == "text/plain"


@pytest.mark.usefixtures("app")
class TestRepositoryArchival(object):

    def test_archival(self, backend):
        backend.enable_downloads()
        commit = backend.repo.get_commit(commit_idx=173)
        for a_type, content_type, extension in settings.ARCHIVE_SPECS:

            short = commit.short_id + extension
            fname = commit.raw_id + extension
            filename = '%s-%s' % (backend.repo_name, short)
            response = self.app.get(
                route_path('repo_archivefile',
                           repo_name=backend.repo_name,
                           fname=fname))

            assert response.status == '200 OK'
            headers = [
                ('Content-Disposition', 'attachment; filename=%s' % filename),
                ('Content-Type', '%s' % content_type),
            ]

            for header in headers:
                assert header in response.headers.items()

    def test_archival_no_hash(self, backend):
        backend.enable_downloads()
        commit = backend.repo.get_commit(commit_idx=173)
        for a_type, content_type, extension in settings.ARCHIVE_SPECS:

            short = 'plain' + extension
            fname = commit.raw_id + extension
            filename = '%s-%s' % (backend.repo_name, short)
            response = self.app.get(
                route_path('repo_archivefile',
                           repo_name=backend.repo_name,
                           fname=fname, params={'with_hash': 0}))

            assert response.status == '200 OK'
            headers = [
                ('Content-Disposition', 'attachment; filename=%s' % filename),
                ('Content-Type', '%s' % content_type),
            ]

            for header in headers:
                assert header in response.headers.items()

    @pytest.mark.parametrize('arch_ext', [
        'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
    def test_archival_wrong_ext(self, backend, arch_ext):
        backend.enable_downloads()
        commit = backend.repo.get_commit(commit_idx=173)

        fname = commit.raw_id + '.' + arch_ext

        response = self.app.get(
            route_path('repo_archivefile',
                       repo_name=backend.repo_name,
                       fname=fname))
        response.mustcontain(
            'Unknown archive type for: `{}`'.format(fname))

    @pytest.mark.parametrize('commit_id', [
        '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
    def test_archival_wrong_commit_id(self, backend, commit_id):
        backend.enable_downloads()
        fname = '%s.zip' % commit_id

        response = self.app.get(
            route_path('repo_archivefile',
                       repo_name=backend.repo_name,
                       fname=fname))
        response.mustcontain('Unknown commit_id')


@pytest.mark.usefixtures("app")
class TestFilesDiff(object):

    @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
    def test_file_full_diff(self, backend, diff):
        commit1 = backend.repo.get_commit(commit_idx=-1)
        commit2 = backend.repo.get_commit(commit_idx=-2)

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit2.raw_id,
                'diff2': commit1.raw_id,
                'fulldiff': '1',
                'diff': diff,
            })

        if diff == 'diff':
            # use redirect since this is OLD view redirecting to compare page
            response = response.follow()

        # It's a symlink to README.rst
        response.mustcontain('README.rst')
        response.mustcontain('No newline at end of file')

    def test_file_binary_diff(self, backend):
        commits = [
            {'message': 'First commit'},
            {'message': 'Commit with binary',
             'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
        ]
        repo = backend.create_repo(commits=commits)

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='file.bin'),
            params={
                'diff1': repo.get_commit(commit_idx=0).raw_id,
                'diff2': repo.get_commit(commit_idx=1).raw_id,
                'fulldiff': '1',
                'diff': 'diff',
            })
        # use redirect since this is OLD view redirecting to compare page
        response = response.follow()
        response.mustcontain('Collapse 1 commit')
        file_changes = (1, 0, 0)

        compare_page = ComparePage(response)
        compare_page.contains_change_summary(*file_changes)

        if backend.alias == 'svn':
            response.mustcontain('new file 10644')
            # TODO(marcink): SVN doesn't yet detect binary changes
        else:
            response.mustcontain('new file 100644')
            response.mustcontain('binary diff hidden')

    def test_diff_2way(self, backend):
        commit1 = backend.repo.get_commit(commit_idx=-1)
        commit2 = backend.repo.get_commit(commit_idx=-2)
        response = self.app.get(
            route_path('repo_files_diff_2way_redirect',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit2.raw_id,
                'diff2': commit1.raw_id,
            })
        # use redirect since this is OLD view redirecting to compare page
        response = response.follow()

        # It's a symlink to README.rst
        response.mustcontain('README.rst')
        response.mustcontain('No newline at end of file')

    def test_requires_one_commit_id(self, backend, autologin_user):
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='README.rst'),
            status=400)
        response.mustcontain(
            'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')

    def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
        repo = vcsbackend.repo
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=repo.name,
                       f_path='does-not-exist-in-any-commit'),
            params={
                'diff1': repo[0].raw_id,
                'diff2': repo[1].raw_id
            })

        response = response.follow()
        response.mustcontain('No files')

    def test_returns_redirect_if_file_not_changed(self, backend):
        commit = backend.repo.get_commit(commit_idx=-1)
        response = self.app.get(
            route_path('repo_files_diff_2way_redirect',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit.raw_id,
                'diff2': commit.raw_id,
            })

        response = response.follow()
        response.mustcontain('No files')
        response.mustcontain('No commits in this compare')

    def test_supports_diff_to_different_path_svn(self, backend_svn):
        # TODO: check this case
        return
713 | return |
|
713 | return | |
714 |
|
714 | |||
715 | repo = backend_svn['svn-simple-layout'].scm_instance() |
|
715 | repo = backend_svn['svn-simple-layout'].scm_instance() | |
716 | commit_id_1 = '24' |
|
716 | commit_id_1 = '24' | |
717 | commit_id_2 = '26' |
|
717 | commit_id_2 = '26' | |
718 |
|
718 | |||
719 | response = self.app.get( |
|
719 | response = self.app.get( | |
720 | route_path('repo_files_diff', |
|
720 | route_path('repo_files_diff', | |
721 | repo_name=backend_svn.repo_name, |
|
721 | repo_name=backend_svn.repo_name, | |
722 | f_path='trunk/example.py'), |
|
722 | f_path='trunk/example.py'), | |
723 | params={ |
|
723 | params={ | |
724 | 'diff1': 'tags/v0.2/example.py@' + commit_id_1, |
|
724 | 'diff1': 'tags/v0.2/example.py@' + commit_id_1, | |
725 | 'diff2': commit_id_2, |
|
725 | 'diff2': commit_id_2, | |
726 | }) |
|
726 | }) | |
727 |
|
727 | |||
728 | response = response.follow() |
|
728 | response = response.follow() | |
729 | response.mustcontain( |
|
729 | response.mustcontain( | |
730 | # diff contains this |
|
730 | # diff contains this | |
731 | "Will print out a useful message on invocation.") |
|
731 | "Will print out a useful message on invocation.") | |
732 |
|
732 | |||
733 | # Note: Expecting that we indicate to the user what's being compared |
|
733 | # Note: Expecting that we indicate to the user what's being compared | |
734 | response.mustcontain("trunk/example.py") |
|
734 | response.mustcontain("trunk/example.py") | |
735 | response.mustcontain("tags/v0.2/example.py") |
|
735 | response.mustcontain("tags/v0.2/example.py") | |
736 |
|
736 | |||
737 | def test_show_rev_redirects_to_svn_path(self, backend_svn): |
|
737 | def test_show_rev_redirects_to_svn_path(self, backend_svn): | |
738 | #TODO: check this case |
|
738 | #TODO: check this case | |
739 | return |
|
739 | return | |
740 |
|
740 | |||
741 | repo = backend_svn['svn-simple-layout'].scm_instance() |
|
741 | repo = backend_svn['svn-simple-layout'].scm_instance() | |
742 | commit_id = repo[-1].raw_id |
|
742 | commit_id = repo[-1].raw_id | |
743 |
|
743 | |||
744 | response = self.app.get( |
|
744 | response = self.app.get( | |
745 | route_path('repo_files_diff', |
|
745 | route_path('repo_files_diff', | |
746 | repo_name=backend_svn.repo_name, |
|
746 | repo_name=backend_svn.repo_name, | |
747 | f_path='trunk/example.py'), |
|
747 | f_path='trunk/example.py'), | |
748 | params={ |
|
748 | params={ | |
749 | 'diff1': 'branches/argparse/example.py@' + commit_id, |
|
749 | 'diff1': 'branches/argparse/example.py@' + commit_id, | |
750 | 'diff2': commit_id, |
|
750 | 'diff2': commit_id, | |
751 | }, |
|
751 | }, | |
752 | status=302) |
|
752 | status=302) | |
753 | response = response.follow() |
|
753 | response = response.follow() | |
754 | assert response.headers['Location'].endswith( |
|
754 | assert response.headers['Location'].endswith( | |
755 | 'svn-svn-simple-layout/files/26/branches/argparse/example.py') |
|
755 | 'svn-svn-simple-layout/files/26/branches/argparse/example.py') | |
756 |
|
756 | |||
757 | def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn): |
|
757 | def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn): | |
758 | #TODO: check this case |
|
758 | #TODO: check this case | |
759 | return |
|
759 | return | |
760 |
|
760 | |||
761 | repo = backend_svn['svn-simple-layout'].scm_instance() |
|
761 | repo = backend_svn['svn-simple-layout'].scm_instance() | |
762 | commit_id = repo[-1].raw_id |
|
762 | commit_id = repo[-1].raw_id | |
763 | response = self.app.get( |
|
763 | response = self.app.get( | |
764 | route_path('repo_files_diff', |
|
764 | route_path('repo_files_diff', | |
765 | repo_name=backend_svn.repo_name, |
|
765 | repo_name=backend_svn.repo_name, | |
766 | f_path='trunk/example.py'), |
|
766 | f_path='trunk/example.py'), | |
767 | params={ |
|
767 | params={ | |
768 | 'diff1': 'branches/argparse/example.py@' + commit_id, |
|
768 | 'diff1': 'branches/argparse/example.py@' + commit_id, | |
769 | 'diff2': commit_id, |
|
769 | 'diff2': commit_id, | |
770 | 'show_rev': 'Show at Revision', |
|
770 | 'show_rev': 'Show at Revision', | |
771 | 'annotate': 'true', |
|
771 | 'annotate': 'true', | |
772 | }, |
|
772 | }, | |
773 | status=302) |
|
773 | status=302) | |
774 | response = response.follow() |
|
774 | response = response.follow() | |
775 | assert response.headers['Location'].endswith( |
|
775 | assert response.headers['Location'].endswith( | |
776 | 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py') |
|
776 | 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py') | |
777 |
|
777 | |||
778 |
|
778 | |||
779 | @pytest.mark.usefixtures("app", "autologin_user") |
|
779 | @pytest.mark.usefixtures("app", "autologin_user") | |
780 | class TestModifyFilesWithWebInterface(object): |
|
780 | class TestModifyFilesWithWebInterface(object): | |
781 |
|
781 | |||
782 | def test_add_file_view(self, backend): |
|
782 | def test_add_file_view(self, backend): | |
783 | self.app.get( |
|
783 | self.app.get( | |
784 | route_path('repo_files_add_file', |
|
784 | route_path('repo_files_add_file', | |
785 | repo_name=backend.repo_name, |
|
785 | repo_name=backend.repo_name, | |
786 | commit_id='tip', f_path='/') |
|
786 | commit_id='tip', f_path='/') | |
787 | ) |
|
787 | ) | |
788 |
|
788 | |||
789 | @pytest.mark.xfail_backends("svn", reason="Depends on online editing") |
|
789 | @pytest.mark.xfail_backends("svn", reason="Depends on online editing") | |
790 | def test_add_file_into_repo_missing_content(self, backend, csrf_token): |
|
790 | def test_add_file_into_repo_missing_content(self, backend, csrf_token): | |
791 | backend.create_repo() |
|
791 | backend.create_repo() | |
792 | filename = 'init.py' |
|
792 | filename = 'init.py' | |
793 | response = self.app.post( |
|
793 | response = self.app.post( | |
794 | route_path('repo_files_create_file', |
|
794 | route_path('repo_files_create_file', | |
795 | repo_name=backend.repo_name, |
|
795 | repo_name=backend.repo_name, | |
796 | commit_id='tip', f_path='/'), |
|
796 | commit_id='tip', f_path='/'), | |
797 | params={ |
|
797 | params={ | |
798 | 'content': "", |
|
798 | 'content': "", | |
799 | 'filename': filename, |
|
799 | 'filename': filename, | |
800 | 'csrf_token': csrf_token, |
|
800 | 'csrf_token': csrf_token, | |
801 | }, |
|
801 | }, | |
802 | status=302) |
|
802 | status=302) | |
803 | expected_msg = 'Successfully committed new file `{}`'.format(os.path.join(filename)) |
|
803 | expected_msg = 'Successfully committed new file `{}`'.format(os.path.join(filename)) | |
804 | assert_session_flash(response, expected_msg) |
|
804 | assert_session_flash(response, expected_msg) | |
805 |
|
805 | |||
806 | def test_add_file_into_repo_missing_filename(self, backend, csrf_token): |
|
806 | def test_add_file_into_repo_missing_filename(self, backend, csrf_token): | |
807 | commit_id = backend.repo.get_commit().raw_id |
|
807 | commit_id = backend.repo.get_commit().raw_id | |
808 | response = self.app.post( |
|
808 | response = self.app.post( | |
809 | route_path('repo_files_create_file', |
|
809 | route_path('repo_files_create_file', | |
810 | repo_name=backend.repo_name, |
|
810 | repo_name=backend.repo_name, | |
811 | commit_id=commit_id, f_path='/'), |
|
811 | commit_id=commit_id, f_path='/'), | |
812 | params={ |
|
812 | params={ | |
813 | 'content': "foo", |
|
813 | 'content': "foo", | |
814 | 'csrf_token': csrf_token, |
|
814 | 'csrf_token': csrf_token, | |
815 | }, |
|
815 | }, | |
816 | status=302) |
|
816 | status=302) | |
817 |
|
817 | |||
818 | assert_session_flash(response, 'No filename specified') |
|
818 | assert_session_flash(response, 'No filename specified') | |
819 |
|
819 | |||
820 | def test_add_file_into_repo_errors_and_no_commits( |
|
820 | def test_add_file_into_repo_errors_and_no_commits( | |
821 | self, backend, csrf_token): |
|
821 | self, backend, csrf_token): | |
822 | repo = backend.create_repo() |
|
822 | repo = backend.create_repo() | |
823 | # Create a file with no filename, it will display an error but |
|
823 | # Create a file with no filename, it will display an error but | |
824 | # the repo has no commits yet |
|
824 | # the repo has no commits yet | |
825 | response = self.app.post( |
|
825 | response = self.app.post( | |
826 | route_path('repo_files_create_file', |
|
826 | route_path('repo_files_create_file', | |
827 | repo_name=repo.repo_name, |
|
827 | repo_name=repo.repo_name, | |
828 | commit_id='tip', f_path='/'), |
|
828 | commit_id='tip', f_path='/'), | |
829 | params={ |
|
829 | params={ | |
830 | 'content': "foo", |
|
830 | 'content': "foo", | |
831 | 'csrf_token': csrf_token, |
|
831 | 'csrf_token': csrf_token, | |
832 | }, |
|
832 | }, | |
833 | status=302) |
|
833 | status=302) | |
834 |
|
834 | |||
835 | assert_session_flash(response, 'No filename specified') |
|
835 | assert_session_flash(response, 'No filename specified') | |
836 |
|
836 | |||
837 | # Not allowed, redirect to the summary |
|
837 | # Not allowed, redirect to the summary | |
838 | redirected = response.follow() |
|
838 | redirected = response.follow() | |
839 | summary_url = h.route_path('repo_summary', repo_name=repo.repo_name) |
|
839 | summary_url = h.route_path('repo_summary', repo_name=repo.repo_name) | |
840 |
|
840 | |||
841 | # As there are no commits, it displays the summary page with the error of |
|
841 | # As there are no commits, it displays the summary page with the error of | |
842 | # creating a file with no filename |
|
842 | # creating a file with no filename | |
843 |
|
843 | |||
844 | assert redirected.request.path == summary_url |
|
844 | assert redirected.request.path == summary_url | |
845 |
|
845 | |||
846 | @pytest.mark.parametrize("filename, clean_filename", [ |
|
846 | @pytest.mark.parametrize("filename, clean_filename", [ | |
847 | ('/abs/foo', 'abs/foo'), |
|
847 | ('/abs/foo', 'abs/foo'), | |
848 | ('../rel/foo', 'rel/foo'), |
|
848 | ('../rel/foo', 'rel/foo'), | |
849 | ('file/../foo/foo', 'file/foo/foo'), |
|
849 | ('file/../foo/foo', 'file/foo/foo'), | |
850 | ]) |
|
850 | ]) | |
851 | def test_add_file_into_repo_bad_filenames(self, filename, clean_filename, backend, csrf_token): |
|
851 | def test_add_file_into_repo_bad_filenames(self, filename, clean_filename, backend, csrf_token): | |
852 | repo = backend.create_repo() |
|
852 | repo = backend.create_repo() | |
853 | commit_id = repo.get_commit().raw_id |
|
853 | commit_id = repo.get_commit().raw_id | |
854 |
|
854 | |||
855 | response = self.app.post( |
|
855 | response = self.app.post( | |
856 | route_path('repo_files_create_file', |
|
856 | route_path('repo_files_create_file', | |
857 | repo_name=repo.repo_name, |
|
857 | repo_name=repo.repo_name, | |
858 | commit_id=commit_id, f_path='/'), |
|
858 | commit_id=commit_id, f_path='/'), | |
859 | params={ |
|
859 | params={ | |
860 | 'content': "foo", |
|
860 | 'content': "foo", | |
861 | 'filename': filename, |
|
861 | 'filename': filename, | |
862 | 'csrf_token': csrf_token, |
|
862 | 'csrf_token': csrf_token, | |
863 | }, |
|
863 | }, | |
864 | status=302) |
|
864 | status=302) | |
865 |
|
865 | |||
866 | expected_msg = 'Successfully committed new file `{}`'.format(clean_filename) |
|
866 | expected_msg = 'Successfully committed new file `{}`'.format(clean_filename) | |
867 | assert_session_flash(response, expected_msg) |
|
867 | assert_session_flash(response, expected_msg) | |
868 |
|
868 | |||
869 | @pytest.mark.parametrize("cnt, filename, content", [ |
|
869 | @pytest.mark.parametrize("cnt, filename, content", [ | |
870 | (1, 'foo.txt', "Content"), |
|
870 | (1, 'foo.txt', "Content"), | |
871 | (2, 'dir/foo.rst', "Content"), |
|
871 | (2, 'dir/foo.rst', "Content"), | |
872 | (3, 'dir/foo-second.rst', "Content"), |
|
872 | (3, 'dir/foo-second.rst', "Content"), | |
873 | (4, 'rel/dir/foo.bar', "Content"), |
|
873 | (4, 'rel/dir/foo.bar', "Content"), | |
874 | ]) |
|
874 | ]) | |
875 | def test_add_file_into_empty_repo(self, cnt, filename, content, backend, csrf_token): |
|
875 | def test_add_file_into_empty_repo(self, cnt, filename, content, backend, csrf_token): | |
876 | repo = backend.create_repo() |
|
876 | repo = backend.create_repo() | |
877 | commit_id = repo.get_commit().raw_id |
|
877 | commit_id = repo.get_commit().raw_id | |
878 | response = self.app.post( |
|
878 | response = self.app.post( | |
879 | route_path('repo_files_create_file', |
|
879 | route_path('repo_files_create_file', | |
880 | repo_name=repo.repo_name, |
|
880 | repo_name=repo.repo_name, | |
881 | commit_id=commit_id, f_path='/'), |
|
881 | commit_id=commit_id, f_path='/'), | |
882 | params={ |
|
882 | params={ | |
883 | 'content': content, |
|
883 | 'content': content, | |
884 | 'filename': filename, |
|
884 | 'filename': filename, | |
885 | 'csrf_token': csrf_token, |
|
885 | 'csrf_token': csrf_token, | |
886 | }, |
|
886 | }, | |
887 | status=302) |
|
887 | status=302) | |
888 |
|
888 | |||
889 | expected_msg = 'Successfully committed new file `{}`'.format(filename) |
|
889 | expected_msg = 'Successfully committed new file `{}`'.format(filename) | |
890 | assert_session_flash(response, expected_msg) |
|
890 | assert_session_flash(response, expected_msg) | |
891 |
|
891 | |||
892 | def test_edit_file_view(self, backend): |
|
892 | def test_edit_file_view(self, backend): | |
893 | response = self.app.get( |
|
893 | response = self.app.get( | |
894 | route_path('repo_files_edit_file', |
|
894 | route_path('repo_files_edit_file', | |
895 | repo_name=backend.repo_name, |
|
895 | repo_name=backend.repo_name, | |
896 | commit_id=backend.default_head_id, |
|
896 | commit_id=backend.default_head_id, | |
897 | f_path='vcs/nodes.py'), |
|
897 | f_path='vcs/nodes.py'), | |
898 | status=200) |
|
898 | status=200) | |
899 | response.mustcontain("Module holding everything related to vcs nodes.") |
|
899 | response.mustcontain("Module holding everything related to vcs nodes.") | |
900 |
|
900 | |||
901 | def test_edit_file_view_not_on_branch(self, backend): |
|
901 | def test_edit_file_view_not_on_branch(self, backend): | |
902 | repo = backend.create_repo() |
|
902 | repo = backend.create_repo() | |
903 | backend.ensure_file("vcs/nodes.py") |
|
903 | backend.ensure_file("vcs/nodes.py") | |
904 |
|
904 | |||
905 | response = self.app.get( |
|
905 | response = self.app.get( | |
906 | route_path('repo_files_edit_file', |
|
906 | route_path('repo_files_edit_file', | |
907 | repo_name=repo.repo_name, |
|
907 | repo_name=repo.repo_name, | |
908 | commit_id='tip', |
|
908 | commit_id='tip', | |
909 | f_path='vcs/nodes.py'), |
|
909 | f_path='vcs/nodes.py'), | |
910 | status=302) |
|
910 | status=302) | |
911 | assert_session_flash( |
|
911 | assert_session_flash( | |
912 | response, 'Cannot modify file. Given commit `tip` is not head of a branch.') |
|
912 | response, 'Cannot modify file. Given commit `tip` is not head of a branch.') | |
913 |
|
913 | |||
914 | def test_edit_file_view_commit_changes(self, backend, csrf_token): |
|
914 | def test_edit_file_view_commit_changes(self, backend, csrf_token): | |
915 | repo = backend.create_repo() |
|
915 | repo = backend.create_repo() | |
916 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") |
|
916 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") | |
917 |
|
917 | |||
918 | response = self.app.post( |
|
918 | response = self.app.post( | |
919 | route_path('repo_files_update_file', |
|
919 | route_path('repo_files_update_file', | |
920 | repo_name=repo.repo_name, |
|
920 | repo_name=repo.repo_name, | |
921 | commit_id=backend.default_head_id, |
|
921 | commit_id=backend.default_head_id, | |
922 | f_path='vcs/nodes.py'), |
|
922 | f_path='vcs/nodes.py'), | |
923 | params={ |
|
923 | params={ | |
924 | 'content': "print 'hello world'", |
|
924 | 'content': "print 'hello world'", | |
925 | 'message': 'I committed', |
|
925 | 'message': 'I committed', | |
926 | 'filename': "vcs/nodes.py", |
|
926 | 'filename': "vcs/nodes.py", | |
927 | 'csrf_token': csrf_token, |
|
927 | 'csrf_token': csrf_token, | |
928 | }, |
|
928 | }, | |
929 | status=302) |
|
929 | status=302) | |
930 | assert_session_flash( |
|
930 | assert_session_flash( | |
931 | response, 'Successfully committed changes to file `vcs/nodes.py`') |
|
931 | response, 'Successfully committed changes to file `vcs/nodes.py`') | |
932 | tip = repo.get_commit(commit_idx=-1) |
|
932 | tip = repo.get_commit(commit_idx=-1) | |
933 | assert tip.message == 'I committed' |
|
933 | assert tip.message == 'I committed' | |
934 |
|
934 | |||
935 | def test_edit_file_view_commit_changes_default_message(self, backend, |
|
935 | def test_edit_file_view_commit_changes_default_message(self, backend, | |
936 | csrf_token): |
|
936 | csrf_token): | |
937 | repo = backend.create_repo() |
|
937 | repo = backend.create_repo() | |
938 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") |
|
938 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") | |
939 |
|
939 | |||
940 | commit_id = ( |
|
940 | commit_id = ( | |
941 | backend.default_branch_name or |
|
941 | backend.default_branch_name or | |
942 | backend.repo.scm_instance().commit_ids[-1]) |
|
942 | backend.repo.scm_instance().commit_ids[-1]) | |
943 |
|
943 | |||
944 | response = self.app.post( |
|
944 | response = self.app.post( | |
945 | route_path('repo_files_update_file', |
|
945 | route_path('repo_files_update_file', | |
946 | repo_name=repo.repo_name, |
|
946 | repo_name=repo.repo_name, | |
947 | commit_id=commit_id, |
|
947 | commit_id=commit_id, | |
948 | f_path='vcs/nodes.py'), |
|
948 | f_path='vcs/nodes.py'), | |
949 | params={ |
|
949 | params={ | |
950 | 'content': "print 'hello world'", |
|
950 | 'content': "print 'hello world'", | |
951 | 'message': '', |
|
951 | 'message': '', | |
952 | 'filename': "vcs/nodes.py", |
|
952 | 'filename': "vcs/nodes.py", | |
953 | 'csrf_token': csrf_token, |
|
953 | 'csrf_token': csrf_token, | |
954 | }, |
|
954 | }, | |
955 | status=302) |
|
955 | status=302) | |
956 | assert_session_flash( |
|
956 | assert_session_flash( | |
957 | response, 'Successfully committed changes to file `vcs/nodes.py`') |
|
957 | response, 'Successfully committed changes to file `vcs/nodes.py`') | |
958 | tip = repo.get_commit(commit_idx=-1) |
|
958 | tip = repo.get_commit(commit_idx=-1) | |
959 | assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise' |
|
959 | assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise' | |
960 |
|
960 | |||
961 | def test_delete_file_view(self, backend): |
|
961 | def test_delete_file_view(self, backend): | |
962 | self.app.get( |
|
962 | self.app.get( | |
963 | route_path('repo_files_remove_file', |
|
963 | route_path('repo_files_remove_file', | |
964 | repo_name=backend.repo_name, |
|
964 | repo_name=backend.repo_name, | |
965 | commit_id=backend.default_head_id, |
|
965 | commit_id=backend.default_head_id, | |
966 | f_path='vcs/nodes.py'), |
|
966 | f_path='vcs/nodes.py'), | |
967 | status=200) |
|
967 | status=200) | |
968 |
|
968 | |||
969 | def test_delete_file_view_not_on_branch(self, backend): |
|
969 | def test_delete_file_view_not_on_branch(self, backend): | |
970 | repo = backend.create_repo() |
|
970 | repo = backend.create_repo() | |
971 | backend.ensure_file('vcs/nodes.py') |
|
971 | backend.ensure_file('vcs/nodes.py') | |
972 |
|
972 | |||
973 | response = self.app.get( |
|
973 | response = self.app.get( | |
974 | route_path('repo_files_remove_file', |
|
974 | route_path('repo_files_remove_file', | |
975 | repo_name=repo.repo_name, |
|
975 | repo_name=repo.repo_name, | |
976 | commit_id='tip', |
|
976 | commit_id='tip', | |
977 | f_path='vcs/nodes.py'), |
|
977 | f_path='vcs/nodes.py'), | |
978 | status=302) |
|
978 | status=302) | |
979 | assert_session_flash( |
|
979 | assert_session_flash( | |
980 | response, 'Cannot modify file. Given commit `tip` is not head of a branch.') |
|
980 | response, 'Cannot modify file. Given commit `tip` is not head of a branch.') | |
981 |
|
981 | |||
982 | def test_delete_file_view_commit_changes(self, backend, csrf_token): |
|
982 | def test_delete_file_view_commit_changes(self, backend, csrf_token): | |
983 | repo = backend.create_repo() |
|
983 | repo = backend.create_repo() | |
984 | backend.ensure_file("vcs/nodes.py") |
|
984 | backend.ensure_file("vcs/nodes.py") | |
985 |
|
985 | |||
986 | response = self.app.post( |
|
986 | response = self.app.post( | |
987 | route_path('repo_files_delete_file', |
|
987 | route_path('repo_files_delete_file', | |
988 | repo_name=repo.repo_name, |
|
988 | repo_name=repo.repo_name, | |
989 | commit_id=backend.default_head_id, |
|
989 | commit_id=backend.default_head_id, | |
990 | f_path='vcs/nodes.py'), |
|
990 | f_path='vcs/nodes.py'), | |
991 | params={ |
|
991 | params={ | |
992 | 'message': 'i committed', |
|
992 | 'message': 'i committed', | |
993 | 'csrf_token': csrf_token, |
|
993 | 'csrf_token': csrf_token, | |
994 | }, |
|
994 | }, | |
995 | status=302) |
|
995 | status=302) | |
996 | assert_session_flash( |
|
996 | assert_session_flash( | |
997 | response, 'Successfully deleted file `vcs/nodes.py`') |
|
997 | response, 'Successfully deleted file `vcs/nodes.py`') | |
998 |
|
998 | |||
999 |
|
999 | |||
1000 | @pytest.mark.usefixtures("app") |
|
1000 | @pytest.mark.usefixtures("app") | |
1001 | class TestFilesViewOtherCases(object): |
|
1001 | class TestFilesViewOtherCases(object): | |
1002 |
|
1002 | |||
1003 | def test_access_empty_repo_redirect_to_summary_with_alert_write_perms( |
|
1003 | def test_access_empty_repo_redirect_to_summary_with_alert_write_perms( | |
1004 | self, backend_stub, autologin_regular_user, user_regular, |
|
1004 | self, backend_stub, autologin_regular_user, user_regular, | |
1005 | user_util): |
|
1005 | user_util): | |
1006 |
|
1006 | |||
1007 | repo = backend_stub.create_repo() |
|
1007 | repo = backend_stub.create_repo() | |
1008 | user_util.grant_user_permission_to_repo( |
|
1008 | user_util.grant_user_permission_to_repo( | |
1009 | repo, user_regular, 'repository.write') |
|
1009 | repo, user_regular, 'repository.write') | |
1010 | response = self.app.get( |
|
1010 | response = self.app.get( | |
1011 | route_path('repo_files', |
|
1011 | route_path('repo_files', | |
1012 | repo_name=repo.repo_name, |
|
1012 | repo_name=repo.repo_name, | |
1013 | commit_id='tip', f_path='/')) |
|
1013 | commit_id='tip', f_path='/')) | |
1014 |
|
1014 | |||
1015 | repo_file_add_url = route_path( |
|
1015 | repo_file_add_url = route_path( | |
1016 | 'repo_files_add_file', |
|
1016 | 'repo_files_add_file', | |
1017 | repo_name=repo.repo_name, |
|
1017 | repo_name=repo.repo_name, | |
1018 | commit_id=0, f_path='') |
|
1018 | commit_id=0, f_path='') | |
1019 |
|
1019 | |||
1020 | assert_session_flash( |
|
1020 | assert_session_flash( | |
1021 | response, |
|
1021 | response, | |
1022 | 'There are no files yet. <a class="alert-link" ' |
|
1022 | 'There are no files yet. <a class="alert-link" ' | |
1023 | 'href="{}">Click here to add a new file.</a>' |
|
1023 | 'href="{}">Click here to add a new file.</a>' | |
1024 | .format(repo_file_add_url)) |
|
1024 | .format(repo_file_add_url)) | |
1025 |
|
1025 | |||
1026 | def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms( |
|
1026 | def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms( | |
1027 | self, backend_stub, autologin_regular_user): |
|
1027 | self, backend_stub, autologin_regular_user): | |
1028 | repo = backend_stub.create_repo() |
|
1028 | repo = backend_stub.create_repo() | |
1029 | # init session for anon user |
|
1029 | # init session for anon user | |
1030 | route_path('repo_summary', repo_name=repo.repo_name) |
|
1030 | route_path('repo_summary', repo_name=repo.repo_name) | |
1031 |
|
1031 | |||
1032 | repo_file_add_url = route_path( |
|
1032 | repo_file_add_url = route_path( | |
1033 | 'repo_files_add_file', |
|
1033 | 'repo_files_add_file', | |
1034 | repo_name=repo.repo_name, |
|
1034 | repo_name=repo.repo_name, | |
1035 | commit_id=0, f_path='') |
|
1035 | commit_id=0, f_path='') | |
1036 |
|
1036 | |||
1037 | response = self.app.get( |
|
1037 | response = self.app.get( | |
1038 | route_path('repo_files', |
|
1038 | route_path('repo_files', | |
1039 | repo_name=repo.repo_name, |
|
1039 | repo_name=repo.repo_name, | |
1040 | commit_id='tip', f_path='/')) |
|
1040 | commit_id='tip', f_path='/')) | |
1041 |
|
1041 | |||
1042 | assert_session_flash(response, no_=repo_file_add_url) |
|
1042 | assert_session_flash(response, no_=repo_file_add_url) | |
1043 |
|
1043 | |||
1044 | @pytest.mark.parametrize('file_node', [ |
|
1044 | @pytest.mark.parametrize('file_node', [ | |
1045 | 'archive/file.zip', |
|
1045 | 'archive/file.zip', | |
1046 | 'diff/my-file.txt', |
|
1046 | 'diff/my-file.txt', | |
1047 | 'render.py', |
|
1047 | 'render.py', | |
1048 | 'render', |
|
1048 | 'render', | |
1049 | 'remove_file', |
|
1049 | 'remove_file', | |
1050 | 'remove_file/to-delete.txt', |
|
1050 | 'remove_file/to-delete.txt', | |
1051 | ]) |
|
1051 | ]) | |
1052 | def test_file_names_equal_to_routes_parts(self, backend, file_node): |
|
1052 | def test_file_names_equal_to_routes_parts(self, backend, file_node): | |
1053 | backend.create_repo() |
|
1053 | backend.create_repo() | |
1054 | backend.ensure_file(file_node) |
|
1054 | backend.ensure_file(file_node) | |
1055 |
|
1055 | |||
1056 | self.app.get( |
|
1056 | self.app.get( | |
1057 | route_path('repo_files', |
|
1057 | route_path('repo_files', | |
1058 | repo_name=backend.repo_name, |
|
1058 | repo_name=backend.repo_name, | |
1059 | commit_id='tip', f_path=file_node), |
|
1059 | commit_id='tip', f_path=file_node), | |
1060 | status=200) |
|
1060 | status=200) | |
1061 |
|
1061 | |||
1062 |
|
1062 | |||
1063 | class TestAdjustFilePathForSvn(object): |
|
1063 | class TestAdjustFilePathForSvn(object): | |
1064 | """ |
|
1064 | """ | |
1065 | SVN specific adjustments of node history in RepoFilesView. |
|
1065 | SVN specific adjustments of node history in RepoFilesView. | |
1066 | """ |
|
1066 | """ | |
1067 |
|
1067 | |||
1068 | def test_returns_path_relative_to_matched_reference(self): |
|
1068 | def test_returns_path_relative_to_matched_reference(self): | |
1069 | repo = self._repo(branches=['trunk']) |
|
1069 | repo = self._repo(branches=['trunk']) | |
1070 | self.assert_file_adjustment('trunk/file', 'file', repo) |
|
1070 | self.assert_file_adjustment('trunk/file', 'file', repo) | |
1071 |
|
1071 | |||
1072 | def test_does_not_modify_file_if_no_reference_matches(self): |
|
1072 | def test_does_not_modify_file_if_no_reference_matches(self): | |
1073 | repo = self._repo(branches=['trunk']) |
|
1073 | repo = self._repo(branches=['trunk']) | |
1074 | self.assert_file_adjustment('notes/file', 'notes/file', repo) |
|
1074 | self.assert_file_adjustment('notes/file', 'notes/file', repo) | |
1075 |
|
1075 | |||
1076 | def test_does_not_adjust_partial_directory_names(self): |
|
1076 | def test_does_not_adjust_partial_directory_names(self): | |
1077 | repo = self._repo(branches=['trun']) |
|
1077 | repo = self._repo(branches=['trun']) | |
1078 | self.assert_file_adjustment('trunk/file', 'trunk/file', repo) |
|
1078 | self.assert_file_adjustment('trunk/file', 'trunk/file', repo) | |
1079 |
|
1079 | |||
1080 | def test_is_robust_to_patterns_which_prefix_other_patterns(self): |
|
1080 | def test_is_robust_to_patterns_which_prefix_other_patterns(self): | |
1081 | repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old']) |
|
1081 | repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old']) | |
1082 | self.assert_file_adjustment('trunk/new/file', 'file', repo) |
|
1082 | self.assert_file_adjustment('trunk/new/file', 'file', repo) | |
1083 |
|
1083 | |||
1084 | def assert_file_adjustment(self, f_path, expected, repo): |
|
1084 | def assert_file_adjustment(self, f_path, expected, repo): | |
1085 | result = RepoFilesView.adjust_file_path_for_svn(f_path, repo) |
|
1085 | result = RepoFilesView.adjust_file_path_for_svn(f_path, repo) | |
1086 | assert result == expected |
|
1086 | assert result == expected | |
1087 |
|
1087 | |||
1088 | def _repo(self, branches=None): |
|
1088 | def _repo(self, branches=None): | |
1089 | repo = mock.Mock() |
|
1089 | repo = mock.Mock() | |
1090 | repo.branches = OrderedDict((name, '0') for name in branches or []) |
|
1090 | repo.branches = OrderedDict((name, '0') for name in branches or []) | |
1091 | repo.tags = {} |
|
1091 | repo.tags = {} | |
1092 | return repo |
|
1092 | return repo |
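
The TestAdjustFilePathForSvn cases above pin down the contract of RepoFilesView.adjust_file_path_for_svn: a file path is made relative to the longest matching branch or tag reference, while partial directory-name matches and unmatched paths are left untouched. The following standalone sketch only illustrates that contract as the tests assert it; it is not the actual RhodeCode implementation.

# Illustrative sketch of the behaviour asserted above (not RhodeCode's code).
def adjust_file_path_for_svn_sketch(f_path, repo):
    # Branch and tag names act as path prefixes; try the longest ones first
    # so that 'trunk/new' wins over its own prefix 'trunk'.
    references = sorted(list(repo.branches) + list(repo.tags), key=len, reverse=True)
    for ref in references:
        if f_path.startswith(ref + '/'):
            # Return the path relative to the matched reference.
            return f_path[len(ref) + 1:]
    # No reference matched: the path stays as-is.
    return f_path

# The mock built by _repo() above works directly as the `repo` argument, e.g.
# adjust_file_path_for_svn_sketch('trunk/new/file', repo) returns 'file'.
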
@@ -1,1028 +1,1028 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Scm model for RhodeCode |
|
22 | Scm model for RhodeCode | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import os.path |
|
25 | import os.path | |
26 | import traceback |
|
26 | import traceback | |
27 | import logging |
|
27 | import logging | |
28 | import cStringIO |
|
28 | import cStringIO | |
29 |
|
29 | |||
30 | from sqlalchemy import func |
|
30 | from sqlalchemy import func | |
31 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
31 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
32 |
|
32 | |||
33 | import rhodecode |
|
33 | import rhodecode | |
34 | from rhodecode.lib.vcs import get_backend |
|
34 | from rhodecode.lib.vcs import get_backend | |
35 | from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError |
|
35 | from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError | |
36 | from rhodecode.lib.vcs.nodes import FileNode |
|
36 | from rhodecode.lib.vcs.nodes import FileNode | |
37 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
37 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
38 | from rhodecode.lib import helpers as h, rc_cache |
|
38 | from rhodecode.lib import helpers as h, rc_cache | |
39 | from rhodecode.lib.auth import ( |
|
39 | from rhodecode.lib.auth import ( | |
40 | HasRepoPermissionAny, HasRepoGroupPermissionAny, |
|
40 | HasRepoPermissionAny, HasRepoGroupPermissionAny, | |
41 | HasUserGroupPermissionAny) |
|
41 | HasUserGroupPermissionAny) | |
42 | from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError |
|
42 | from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError | |
43 | from rhodecode.lib import hooks_utils |
|
43 | from rhodecode.lib import hooks_utils | |
44 | from rhodecode.lib.utils import ( |
|
44 | from rhodecode.lib.utils import ( | |
45 | get_filesystem_repos, make_db_config) |
|
45 | get_filesystem_repos, make_db_config) | |
46 | from rhodecode.lib.utils2 import (safe_str, safe_unicode) |
|
46 | from rhodecode.lib.utils2 import (safe_str, safe_unicode) | |
47 | from rhodecode.lib.system_info import get_system_info |
|
47 | from rhodecode.lib.system_info import get_system_info | |
48 | from rhodecode.model import BaseModel |
|
48 | from rhodecode.model import BaseModel | |
49 | from rhodecode.model.db import ( |
|
49 | from rhodecode.model.db import ( | |
50 | or_, false, |
|
50 | or_, false, | |
51 | Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup, |
|
51 | Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup, | |
52 | PullRequest, FileStore) |
|
52 | PullRequest, FileStore) | |
53 | from rhodecode.model.settings import VcsSettingsModel |
|
53 | from rhodecode.model.settings import VcsSettingsModel | |
54 | from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl |
|
54 | from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl | |
55 |
|
55 | |||
56 | log = logging.getLogger(__name__) |
|
56 | log = logging.getLogger(__name__) | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | class UserTemp(object): |
|
59 | class UserTemp(object): | |
60 | def __init__(self, user_id): |
|
60 | def __init__(self, user_id): | |
61 | self.user_id = user_id |
|
61 | self.user_id = user_id | |
62 |
|
62 | |||
63 | def __repr__(self): |
|
63 | def __repr__(self): | |
64 | return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id) |
|
64 | return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id) | |
65 |
|
65 | |||
66 |
|
66 | |||
67 | class RepoTemp(object): |
|
67 | class RepoTemp(object): | |
68 | def __init__(self, repo_id): |
|
68 | def __init__(self, repo_id): | |
69 | self.repo_id = repo_id |
|
69 | self.repo_id = repo_id | |
70 |
|
70 | |||
71 | def __repr__(self): |
|
71 | def __repr__(self): | |
72 | return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id) |
|
72 | return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id) | |
73 |
|
73 | |||
74 |
|
74 | |||
75 | class SimpleCachedRepoList(object): |
|
75 | class SimpleCachedRepoList(object): | |
76 | """ |
|
76 | """ | |
77 | Lighter version of iteration of repos without the scm initialisation, |
|
77 | Lighter version of iteration of repos without the scm initialisation, | |
78 | and with cache usage |
|
78 | and with cache usage | |
79 | """ |
|
79 | """ | |
80 | def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None): |
|
80 | def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None): | |
81 | self.db_repo_list = db_repo_list |
|
81 | self.db_repo_list = db_repo_list | |
82 | self.repos_path = repos_path |
|
82 | self.repos_path = repos_path | |
83 | self.order_by = order_by |
|
83 | self.order_by = order_by | |
84 | self.reversed = (order_by or '').startswith('-') |
|
84 | self.reversed = (order_by or '').startswith('-') | |
85 | if not perm_set: |
|
85 | if not perm_set: | |
86 | perm_set = ['repository.read', 'repository.write', |
|
86 | perm_set = ['repository.read', 'repository.write', | |
87 | 'repository.admin'] |
|
87 | 'repository.admin'] | |
88 | self.perm_set = perm_set |
|
88 | self.perm_set = perm_set | |
89 |
|
89 | |||
90 | def __len__(self): |
|
90 | def __len__(self): | |
91 | return len(self.db_repo_list) |
|
91 | return len(self.db_repo_list) | |
92 |
|
92 | |||
93 | def __repr__(self): |
|
93 | def __repr__(self): | |
94 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
94 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) | |
95 |
|
95 | |||
96 | def __iter__(self): |
|
96 | def __iter__(self): | |
97 | for dbr in self.db_repo_list: |
|
97 | for dbr in self.db_repo_list: | |
98 | # check permission at this level |
|
98 | # check permission at this level | |
99 | has_perm = HasRepoPermissionAny(*self.perm_set)( |
|
99 | has_perm = HasRepoPermissionAny(*self.perm_set)( | |
100 | dbr.repo_name, 'SimpleCachedRepoList check') |
|
100 | dbr.repo_name, 'SimpleCachedRepoList check') | |
101 | if not has_perm: |
|
101 | if not has_perm: | |
102 | continue |
|
102 | continue | |
103 |
|
103 | |||
104 | tmp_d = { |
|
104 | tmp_d = { | |
105 | 'name': dbr.repo_name, |
|
105 | 'name': dbr.repo_name, | |
106 | 'dbrepo': dbr.get_dict(), |
|
106 | 'dbrepo': dbr.get_dict(), | |
107 | 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {} |
|
107 | 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {} | |
108 | } |
|
108 | } | |
109 | yield tmp_d |
|
109 | yield tmp_d | |
110 |
|
110 | |||
111 |
|
111 | |||
112 | class _PermCheckIterator(object): |
|
112 | class _PermCheckIterator(object): | |
113 |
|
113 | |||
114 | def __init__( |
|
114 | def __init__( | |
115 | self, obj_list, obj_attr, perm_set, perm_checker, |
|
115 | self, obj_list, obj_attr, perm_set, perm_checker, | |
116 | extra_kwargs=None): |
|
116 | extra_kwargs=None): | |
117 | """ |
|
117 | """ | |
118 | Creates iterator from given list of objects, additionally |
|
118 | Creates iterator from given list of objects, additionally | |
119 | checking permission for them from perm_set var |
|
119 | checking permission for them from perm_set var | |
120 |
|
120 | |||
121 | :param obj_list: list of db objects |
|
121 | :param obj_list: list of db objects | |
122 | :param obj_attr: attribute of object to pass into perm_checker |
|
122 | :param obj_attr: attribute of object to pass into perm_checker | |
123 | :param perm_set: list of permissions to check |
|
123 | :param perm_set: list of permissions to check | |
124 | :param perm_checker: callable to check permissions against |
|
124 | :param perm_checker: callable to check permissions against | |
125 | """ |
|
125 | """ | |
126 | self.obj_list = obj_list |
|
126 | self.obj_list = obj_list | |
127 | self.obj_attr = obj_attr |
|
127 | self.obj_attr = obj_attr | |
128 | self.perm_set = perm_set |
|
128 | self.perm_set = perm_set | |
129 | self.perm_checker = perm_checker(*self.perm_set) |
|
129 | self.perm_checker = perm_checker(*self.perm_set) | |
130 | self.extra_kwargs = extra_kwargs or {} |
|
130 | self.extra_kwargs = extra_kwargs or {} | |
131 |
|
131 | |||
132 | def __len__(self): |
|
132 | def __len__(self): | |
133 | return len(self.obj_list) |
|
133 | return len(self.obj_list) | |
134 |
|
134 | |||
135 | def __repr__(self): |
|
135 | def __repr__(self): | |
136 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
136 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) | |
137 |
|
137 | |||
138 | def __iter__(self): |
|
138 | def __iter__(self): | |
139 | for db_obj in self.obj_list: |
|
139 | for db_obj in self.obj_list: | |
140 | # check permission at this level |
|
140 | # check permission at this level | |
141 | # NOTE(marcink): the __dict__.get() is ~4x faster than getattr() |
|
141 | # NOTE(marcink): the __dict__.get() is ~4x faster than getattr() | |
142 | name = db_obj.__dict__.get(self.obj_attr, None) |
|
142 | name = db_obj.__dict__.get(self.obj_attr, None) | |
143 | if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs): |
|
143 | if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs): | |
144 | continue |
|
144 | continue | |
145 |
|
145 | |||
146 | yield db_obj |
|
146 | yield db_obj | |
147 |
|
147 | |||
148 |
|
148 | |||
149 | class RepoList(_PermCheckIterator): |
|
149 | class RepoList(_PermCheckIterator): | |
150 |
|
150 | |||
151 | def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None): |
|
151 | def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None): | |
152 | if not perm_set: |
|
152 | if not perm_set: | |
153 | perm_set = ['repository.read', 'repository.write', 'repository.admin'] |
|
153 | perm_set = ['repository.read', 'repository.write', 'repository.admin'] | |
154 |
|
154 | |||
155 | super(RepoList, self).__init__( |
|
155 | super(RepoList, self).__init__( | |
156 | obj_list=db_repo_list, |
|
156 | obj_list=db_repo_list, | |
157 | obj_attr='_repo_name', perm_set=perm_set, |
|
157 | obj_attr='_repo_name', perm_set=perm_set, | |
158 | perm_checker=HasRepoPermissionAny, |
|
158 | perm_checker=HasRepoPermissionAny, | |
159 | extra_kwargs=extra_kwargs) |
|
159 | extra_kwargs=extra_kwargs) | |
160 |
|
160 | |||
161 |
|
161 | |||
162 | class RepoGroupList(_PermCheckIterator): |
|
162 | class RepoGroupList(_PermCheckIterator): | |
163 |
|
163 | |||
164 | def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None): |
|
164 | def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None): | |
165 | if not perm_set: |
|
165 | if not perm_set: | |
166 | perm_set = ['group.read', 'group.write', 'group.admin'] |
|
166 | perm_set = ['group.read', 'group.write', 'group.admin'] | |
167 |
|
167 | |||
168 | super(RepoGroupList, self).__init__( |
|
168 | super(RepoGroupList, self).__init__( | |
169 | obj_list=db_repo_group_list, |
|
169 | obj_list=db_repo_group_list, | |
170 | obj_attr='_group_name', perm_set=perm_set, |
|
170 | obj_attr='_group_name', perm_set=perm_set, | |
171 | perm_checker=HasRepoGroupPermissionAny, |
|
171 | perm_checker=HasRepoGroupPermissionAny, | |
172 | extra_kwargs=extra_kwargs) |
|
172 | extra_kwargs=extra_kwargs) | |
173 |
|
173 | |||
174 |
|
174 | |||
175 | class UserGroupList(_PermCheckIterator): |
|
175 | class UserGroupList(_PermCheckIterator): | |
176 |
|
176 | |||
177 | def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None): |
|
177 | def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None): | |
178 | if not perm_set: |
|
178 | if not perm_set: | |
179 | perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] |
|
179 | perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] | |
180 |
|
180 | |||
181 | super(UserGroupList, self).__init__( |
|
181 | super(UserGroupList, self).__init__( | |
182 | obj_list=db_user_group_list, |
|
182 | obj_list=db_user_group_list, | |
183 | obj_attr='users_group_name', perm_set=perm_set, |
|
183 | obj_attr='users_group_name', perm_set=perm_set, | |
184 | perm_checker=HasUserGroupPermissionAny, |
|
184 | perm_checker=HasUserGroupPermissionAny, | |
185 | extra_kwargs=extra_kwargs) |
|
185 | extra_kwargs=extra_kwargs) | |
186 |
|
186 | |||
187 |
|
187 | |||
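
RepoList, RepoGroupList and UserGroupList above are thin wrappers over _PermCheckIterator: iterating them lazily yields only the database objects for which the current user holds one of the permissions in perm_set. A minimal usage sketch with illustrative variable names, assuming Repository.query() is available the same way RepoGroup.query() is used below:

# Hypothetical call site: keep only repositories the user may at least read.
db_repos = Repository.query().all()      # plain SQLAlchemy objects
readable_repos = RepoList(db_repos)      # defaults to read/write/admin perm_set
for db_repo in readable_repos:           # the permission check runs per object here
    print(db_repo.repo_name)
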
188 | class ScmModel(BaseModel): |
|
188 | class ScmModel(BaseModel): | |
189 | """ |
|
189 | """ | |
190 | Generic Scm Model |
|
190 | Generic Scm Model | |
191 | """ |
|
191 | """ | |
192 |
|
192 | |||
193 | @LazyProperty |
|
193 | @LazyProperty | |
194 | def repos_path(self): |
|
194 | def repos_path(self): | |
195 | """ |
|
195 | """ | |
196 | Gets the repositories root path from database |
|
196 | Gets the repositories root path from database | |
197 | """ |
|
197 | """ | |
198 |
|
198 | |||
199 | settings_model = VcsSettingsModel(sa=self.sa) |
|
199 | settings_model = VcsSettingsModel(sa=self.sa) | |
200 | return settings_model.get_repos_location() |
|
200 | return settings_model.get_repos_location() | |
201 |
|
201 | |||
202 | def repo_scan(self, repos_path=None): |
|
202 | def repo_scan(self, repos_path=None): | |
203 | """ |
|
203 | """ | |
204 | Listing of repositories in given path. This path should not be a |
|
204 | Listing of repositories in given path. This path should not be a | |
205 | repository itself. Return a dictionary of repository objects |
|
205 | repository itself. Return a dictionary of repository objects | |
206 |
|
206 | |||
207 | :param repos_path: path to directory containing repositories |
|
207 | :param repos_path: path to directory containing repositories | |
208 | """ |
|
208 | """ | |
209 |
|
209 | |||
210 | if repos_path is None: |
|
210 | if repos_path is None: | |
211 | repos_path = self.repos_path |
|
211 | repos_path = self.repos_path | |
212 |
|
212 | |||
213 | log.info('scanning for repositories in %s', repos_path) |
|
213 | log.info('scanning for repositories in %s', repos_path) | |
214 |
|
214 | |||
215 | config = make_db_config() |
|
215 | config = make_db_config() | |
216 | config.set('extensions', 'largefiles', '') |
|
216 | config.set('extensions', 'largefiles', '') | |
217 | repos = {} |
|
217 | repos = {} | |
218 |
|
218 | |||
219 | for name, path in get_filesystem_repos(repos_path, recursive=True): |
|
219 | for name, path in get_filesystem_repos(repos_path, recursive=True): | |
220 | # name needs to be decomposed and put back together using the / |
|
220 | # name needs to be decomposed and put back together using the / | |
221 | # since this is the internal storage separator for rhodecode |
|
221 | # since this is the internal storage separator for rhodecode | |
222 | name = Repository.normalize_repo_name(name) |
|
222 | name = Repository.normalize_repo_name(name) | |
223 |
|
223 | |||
224 | try: |
|
224 | try: | |
225 | if name in repos: |
|
225 | if name in repos: | |
226 | raise RepositoryError('Duplicate repository name %s ' |
|
226 | raise RepositoryError('Duplicate repository name %s ' | |
227 | 'found in %s' % (name, path)) |
|
227 | 'found in %s' % (name, path)) | |
228 | elif path[0] in rhodecode.BACKENDS: |
|
228 | elif path[0] in rhodecode.BACKENDS: | |
229 | backend = get_backend(path[0]) |
|
229 | backend = get_backend(path[0]) | |
230 | repos[name] = backend(path[1], config=config, |
|
230 | repos[name] = backend(path[1], config=config, | |
231 | with_wire={"cache": False}) |
|
231 | with_wire={"cache": False}) | |
232 | except OSError: |
|
232 | except OSError: | |
233 | continue |
|
233 | continue | |
234 | except RepositoryError: |
|
234 | except RepositoryError: | |
235 | log.exception('Failed to create a repo') |
|
235 | log.exception('Failed to create a repo') | |
236 | continue |
|
236 | continue | |
237 |
|
237 | |||
238 | log.debug('found %s paths with repositories', len(repos)) |
|
238 | log.debug('found %s paths with repositories', len(repos)) | |
239 | return repos |
|
239 | return repos | |
240 |
|
240 | |||
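
repo_scan above walks repos_path recursively and returns a dict mapping normalized repository names to instantiated vcs backends; duplicate names raise RepositoryError and unreadable paths are skipped. A hedged sketch of one way such a scan might be consumed (the comparison against the database is illustrative, not code from this module):

# Hypothetical: compare repositories found on disk with those registered in the DB.
scm = ScmModel()
on_disk = scm.repo_scan()                                    # {'group/repo': <backend>, ...}
registered = {r.repo_name for r in Repository.query().all()}
unregistered = sorted(set(on_disk) - registered)             # on disk, missing in the DB
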
241 | def get_repos(self, all_repos=None, sort_key=None): |
|
241 | def get_repos(self, all_repos=None, sort_key=None): | |
242 | """ |
|
242 | """ | |
243 | Get all repositories from db and for each repo create its |
|
243 | Get all repositories from db and for each repo create its | |
244 | backend instance and fill that backend with information from database |
|
244 | backend instance and fill that backend with information from database | |
245 |
|
245 | |||
246 | :param all_repos: list of repository names as strings |
|
246 | :param all_repos: list of repository names as strings | |
247 | give specific repositories list, good for filtering |
|
247 | give specific repositories list, good for filtering | |
248 |
|
248 | |||
249 | :param sort_key: initial sorting of repositories |
|
249 | :param sort_key: initial sorting of repositories | |
250 | """ |
|
250 | """ | |
251 | if all_repos is None: |
|
251 | if all_repos is None: | |
252 | all_repos = self.sa.query(Repository)\ |
|
252 | all_repos = self.sa.query(Repository)\ | |
253 | .filter(Repository.group_id == None)\ |
|
253 | .filter(Repository.group_id == None)\ | |
254 | .order_by(func.lower(Repository.repo_name)).all() |
|
254 | .order_by(func.lower(Repository.repo_name)).all() | |
255 | repo_iter = SimpleCachedRepoList( |
|
255 | repo_iter = SimpleCachedRepoList( | |
256 | all_repos, repos_path=self.repos_path, order_by=sort_key) |
|
256 | all_repos, repos_path=self.repos_path, order_by=sort_key) | |
257 | return repo_iter |
|
257 | return repo_iter | |
258 |
|
258 | |||
259 | def get_repo_groups(self, all_groups=None): |
|
259 | def get_repo_groups(self, all_groups=None): | |
260 | if all_groups is None: |
|
260 | if all_groups is None: | |
261 | all_groups = RepoGroup.query()\ |
|
261 | all_groups = RepoGroup.query()\ | |
262 | .filter(RepoGroup.group_parent_id == None).all() |
|
262 | .filter(RepoGroup.group_parent_id == None).all() | |
263 | return [x for x in RepoGroupList(all_groups)] |
|
263 | return [x for x in RepoGroupList(all_groups)] | |
264 |
|
264 | |||
265 | def mark_for_invalidation(self, repo_name, delete=False): |
|
265 | def mark_for_invalidation(self, repo_name, delete=False): | |
266 | """ |
|
266 | """ | |
267 | Mark caches of this repo invalid in the database. `delete` flag |
|
267 | Mark caches of this repo invalid in the database. `delete` flag | |
268 | removes the cache entries |
|
268 | removes the cache entries | |
269 |
|
269 | |||
270 | :param repo_name: the repo_name for which caches should be marked |
|
270 | :param repo_name: the repo_name for which caches should be marked | |
271 | invalid, or deleted |
|
271 | invalid, or deleted | |
272 | :param delete: delete the entry keys instead of setting bool |
|
272 | :param delete: delete the entry keys instead of setting bool | |
273 | flag on them, and also purge caches used by the dogpile |
|
273 | flag on them, and also purge caches used by the dogpile | |
274 | """ |
|
274 | """ | |
275 | repo = Repository.get_by_repo_name(repo_name) |
|
275 | repo = Repository.get_by_repo_name(repo_name) | |
276 |
|
276 | |||
277 | if repo: |
|
277 | if repo: | |
278 | invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( |
|
278 | invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( | |
279 | repo_id=repo.repo_id) |
|
279 | repo_id=repo.repo_id) | |
280 | CacheKey.set_invalidate(invalidation_namespace, delete=delete) |
|
280 | CacheKey.set_invalidate(invalidation_namespace, delete=delete) | |
281 |
|
281 | |||
282 | repo_id = repo.repo_id |
|
282 | repo_id = repo.repo_id | |
283 | config = repo._config |
|
283 | config = repo._config | |
284 | config.set('extensions', 'largefiles', '') |
|
284 | config.set('extensions', 'largefiles', '') | |
285 | repo.update_commit_cache(config=config, cs_cache=None) |
|
285 | repo.update_commit_cache(config=config, cs_cache=None) | |
286 | if delete: |
|
286 | if delete: | |
287 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) |
|
287 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) | |
288 | rc_cache.clear_cache_namespace( |
|
288 | rc_cache.clear_cache_namespace( | |
289 | 'cache_repo', cache_namespace_uid, invalidate=True) |
|
289 | 'cache_repo', cache_namespace_uid, invalidate=True) | |
290 |
|
290 | |||
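
mark_for_invalidation above does two things for an existing repository: it bumps the per-repo CacheKey invalidation namespace and refreshes the commit cache, and with delete=True it additionally purges the dogpile 'cache_repo' namespace. A hedged usage sketch, assuming ScmModel() can be constructed with the default session as elsewhere in the model layer:

# Hypothetical call sites for cache invalidation.
scm = ScmModel()
scm.mark_for_invalidation('some-group/some-repo')                # flag caches as stale
scm.mark_for_invalidation('some-group/some-repo', delete=True)   # also drop keys and purge dogpile caches
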
291 | def toggle_following_repo(self, follow_repo_id, user_id): |
|
291 | def toggle_following_repo(self, follow_repo_id, user_id): | |
292 |
|
292 | |||
293 | f = self.sa.query(UserFollowing)\ |
|
293 | f = self.sa.query(UserFollowing)\ | |
294 | .filter(UserFollowing.follows_repo_id == follow_repo_id)\ |
|
294 | .filter(UserFollowing.follows_repo_id == follow_repo_id)\ | |
295 | .filter(UserFollowing.user_id == user_id).scalar() |
|
295 | .filter(UserFollowing.user_id == user_id).scalar() | |
296 |
|
296 | |||
297 | if f is not None: |
|
297 | if f is not None: | |
298 | try: |
|
298 | try: | |
299 | self.sa.delete(f) |
|
299 | self.sa.delete(f) | |
300 | return |
|
300 | return | |
301 | except Exception: |
|
301 | except Exception: | |
302 | log.error(traceback.format_exc()) |
|
302 | log.error(traceback.format_exc()) | |
303 | raise |
|
303 | raise | |
304 |
|
304 | |||
305 | try: |
|
305 | try: | |
306 | f = UserFollowing() |
|
306 | f = UserFollowing() | |
307 | f.user_id = user_id |
|
307 | f.user_id = user_id | |
308 | f.follows_repo_id = follow_repo_id |
|
308 | f.follows_repo_id = follow_repo_id | |
309 | self.sa.add(f) |
|
309 | self.sa.add(f) | |
310 | except Exception: |
|
310 | except Exception: | |
311 | log.error(traceback.format_exc()) |
|
311 | log.error(traceback.format_exc()) | |
312 | raise |
|
312 | raise | |
313 |
|
313 | |||
314 | def toggle_following_user(self, follow_user_id, user_id): |
|
314 | def toggle_following_user(self, follow_user_id, user_id): | |
315 | f = self.sa.query(UserFollowing)\ |
|
315 | f = self.sa.query(UserFollowing)\ | |
316 | .filter(UserFollowing.follows_user_id == follow_user_id)\ |
|
316 | .filter(UserFollowing.follows_user_id == follow_user_id)\ | |
317 | .filter(UserFollowing.user_id == user_id).scalar() |
|
317 | .filter(UserFollowing.user_id == user_id).scalar() | |
318 |
|
318 | |||
319 | if f is not None: |
|
319 | if f is not None: | |
320 | try: |
|
320 | try: | |
321 | self.sa.delete(f) |
|
321 | self.sa.delete(f) | |
322 | return |
|
322 | return | |
323 | except Exception: |
|
323 | except Exception: | |
324 | log.error(traceback.format_exc()) |
|
324 | log.error(traceback.format_exc()) | |
325 | raise |
|
325 | raise | |
326 |
|
326 | |||
327 | try: |
|
327 | try: | |
328 | f = UserFollowing() |
|
328 | f = UserFollowing() | |
329 | f.user_id = user_id |
|
329 | f.user_id = user_id | |
330 | f.follows_user_id = follow_user_id |
|
330 | f.follows_user_id = follow_user_id | |
331 | self.sa.add(f) |
|
331 | self.sa.add(f) | |
332 | except Exception: |
|
332 | except Exception: | |
333 | log.error(traceback.format_exc()) |
|
333 | log.error(traceback.format_exc()) | |
334 | raise |
|
334 | raise | |
335 |
|
335 | |||
336 | def is_following_repo(self, repo_name, user_id, cache=False): |
|
336 | def is_following_repo(self, repo_name, user_id, cache=False): | |
337 | r = self.sa.query(Repository)\ |
|
337 | r = self.sa.query(Repository)\ | |
338 | .filter(Repository.repo_name == repo_name).scalar() |
|
338 | .filter(Repository.repo_name == repo_name).scalar() | |
339 |
|
339 | |||
340 | f = self.sa.query(UserFollowing)\ |
|
340 | f = self.sa.query(UserFollowing)\ | |
341 | .filter(UserFollowing.follows_repository == r)\ |
|
341 | .filter(UserFollowing.follows_repository == r)\ | |
342 | .filter(UserFollowing.user_id == user_id).scalar() |
|
342 | .filter(UserFollowing.user_id == user_id).scalar() | |
343 |
|
343 | |||
344 | return f is not None |
|
344 | return f is not None | |
345 |
|
345 | |||
346 | def is_following_user(self, username, user_id, cache=False): |
|
346 | def is_following_user(self, username, user_id, cache=False): | |
347 | u = User.get_by_username(username) |
|
347 | u = User.get_by_username(username) | |
348 |
|
348 | |||
349 | f = self.sa.query(UserFollowing)\ |
|
349 | f = self.sa.query(UserFollowing)\ | |
350 | .filter(UserFollowing.follows_user == u)\ |
|
350 | .filter(UserFollowing.follows_user == u)\ | |
351 | .filter(UserFollowing.user_id == user_id).scalar() |
|
351 | .filter(UserFollowing.user_id == user_id).scalar() | |
352 |
|
352 | |||
353 | return f is not None |
|
353 | return f is not None | |
354 |
|
354 | |||
355 | def get_followers(self, repo): |
|
355 | def get_followers(self, repo): | |
356 | repo = self._get_repo(repo) |
|
356 | repo = self._get_repo(repo) | |
357 |
|
357 | |||
358 | return self.sa.query(UserFollowing)\ |
|
358 | return self.sa.query(UserFollowing)\ | |
359 | .filter(UserFollowing.follows_repository == repo).count() |
|
359 | .filter(UserFollowing.follows_repository == repo).count() | |
360 |
|
360 | |||
361 | def get_forks(self, repo): |
|
361 | def get_forks(self, repo): | |
362 | repo = self._get_repo(repo) |
|
362 | repo = self._get_repo(repo) | |
363 | return self.sa.query(Repository)\ |
|
363 | return self.sa.query(Repository)\ | |
364 | .filter(Repository.fork == repo).count() |
|
364 | .filter(Repository.fork == repo).count() | |
365 |
|
365 | |||
366 | def get_pull_requests(self, repo): |
|
366 | def get_pull_requests(self, repo): | |
367 | repo = self._get_repo(repo) |
|
367 | repo = self._get_repo(repo) | |
368 | return self.sa.query(PullRequest)\ |
|
368 | return self.sa.query(PullRequest)\ | |
369 | .filter(PullRequest.target_repo == repo)\ |
|
369 | .filter(PullRequest.target_repo == repo)\ | |
370 | .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() |
|
370 | .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() | |
371 |
|
371 | |||
372 | def get_artifacts(self, repo): |
|
372 | def get_artifacts(self, repo): | |
373 | repo = self._get_repo(repo) |
|
373 | repo = self._get_repo(repo) | |
374 | return self.sa.query(FileStore)\ |
|
374 | return self.sa.query(FileStore)\ | |
375 | .filter(FileStore.repo == repo)\ |
|
375 | .filter(FileStore.repo == repo)\ | |
376 | .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count() |
|
376 | .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count() | |
377 |
|
377 | |||
378 | def mark_as_fork(self, repo, fork, user): |
|
378 | def mark_as_fork(self, repo, fork, user): | |
379 | repo = self._get_repo(repo) |
|
379 | repo = self._get_repo(repo) | |
380 | fork = self._get_repo(fork) |
|
380 | fork = self._get_repo(fork) | |
381 | if fork and repo.repo_id == fork.repo_id: |
|
381 | if fork and repo.repo_id == fork.repo_id: | |
382 | raise Exception("Cannot set repository as fork of itself") |
|
382 | raise Exception("Cannot set repository as fork of itself") | |
383 |
|
383 | |||
384 | if fork and repo.repo_type != fork.repo_type: |
|
384 | if fork and repo.repo_type != fork.repo_type: | |
385 | raise RepositoryError( |
|
385 | raise RepositoryError( | |
386 | "Cannot set repository as fork of repository with other type") |
|
386 | "Cannot set repository as fork of repository with other type") | |
387 |
|
387 | |||
388 | repo.fork = fork |
|
388 | repo.fork = fork | |
389 | self.sa.add(repo) |
|
389 | self.sa.add(repo) | |
390 | return repo |
|
390 | return repo | |
391 |
|
391 | |||
392 | def pull_changes(self, repo, username, remote_uri=None, validate_uri=True): |
|
392 | def pull_changes(self, repo, username, remote_uri=None, validate_uri=True): | |
393 | dbrepo = self._get_repo(repo) |
|
393 | dbrepo = self._get_repo(repo) | |
394 | remote_uri = remote_uri or dbrepo.clone_uri |
|
394 | remote_uri = remote_uri or dbrepo.clone_uri | |
395 | if not remote_uri: |
|
395 | if not remote_uri: | |
396 | raise Exception("This repository doesn't have a clone uri") |
|
396 | raise Exception("This repository doesn't have a clone uri") | |
397 |
|
397 | |||
398 | repo = dbrepo.scm_instance(cache=False) |
|
398 | repo = dbrepo.scm_instance(cache=False) | |
399 | repo.config.clear_section('hooks') |
|
399 | repo.config.clear_section('hooks') | |
400 |
|
400 | |||
401 | try: |
|
401 | try: | |
402 | # NOTE(marcink): add extra validation so we skip invalid urls |
|
402 | # NOTE(marcink): add extra validation so we skip invalid urls | |
403 | # this is because these tasks can be executed via the scheduler without |
|
403 | # this is because these tasks can be executed via the scheduler without | |
404 | # proper validation of remote_uri |
|
404 | # proper validation of remote_uri | |
405 | if validate_uri: |
|
405 | if validate_uri: | |
406 | config = make_db_config(clear_session=False) |
|
406 | config = make_db_config(clear_session=False) | |
407 | url_validator(remote_uri, dbrepo.repo_type, config) |
|
407 | url_validator(remote_uri, dbrepo.repo_type, config) | |
408 | except InvalidCloneUrl: |
|
408 | except InvalidCloneUrl: | |
409 | raise |
|
409 | raise | |
410 |
|
410 | |||
411 | repo_name = dbrepo.repo_name |
|
411 | repo_name = dbrepo.repo_name | |
412 | try: |
|
412 | try: | |
413 | # TODO: we need to make sure those operations call proper hooks ! |
|
413 | # TODO: we need to make sure those operations call proper hooks ! | |
414 | repo.fetch(remote_uri) |
|
414 | repo.fetch(remote_uri) | |
415 |
|
415 | |||
416 | self.mark_for_invalidation(repo_name) |
|
416 | self.mark_for_invalidation(repo_name) | |
417 | except Exception: |
|
417 | except Exception: | |
418 | log.error(traceback.format_exc()) |
|
418 | log.error(traceback.format_exc()) | |
419 | raise |
|
419 | raise | |
420 |
|
420 | |||
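
For orientation, a minimal usage sketch of the pull operation above. The class
name ScmModel, the repository name and the username are assumptions used only
for illustration; an initialized RhodeCode environment with a database session
and a repository that has clone_uri set is required.

    # Hedged sketch -- assumes the enclosing class is rhodecode.model.scm.ScmModel
    # and that the application/database context is already configured.
    from rhodecode.model.scm import ScmModel

    scm = ScmModel()
    # Pulls from the repository's stored clone_uri unless remote_uri is given;
    # validate_uri guards against URLs that never went through form validation.
    scm.pull_changes('some-repo', username='admin', validate_uri=True)
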
421 | def push_changes(self, repo, username, remote_uri=None, validate_uri=True): |
|
421 | def push_changes(self, repo, username, remote_uri=None, validate_uri=True): | |
422 | dbrepo = self._get_repo(repo) |
|
422 | dbrepo = self._get_repo(repo) | |
423 | remote_uri = remote_uri or dbrepo.push_uri |
|
423 | remote_uri = remote_uri or dbrepo.push_uri | |
424 | if not remote_uri: |
|
424 | if not remote_uri: | |
425 | raise Exception("This repository doesn't have a push uri") |
|
425 | raise Exception("This repository doesn't have a push uri") | |
426 |
|
426 | |||
427 | repo = dbrepo.scm_instance(cache=False) |
|
427 | repo = dbrepo.scm_instance(cache=False) | |
428 | repo.config.clear_section('hooks') |
|
428 | repo.config.clear_section('hooks') | |
429 |
|
429 | |||
430 | try: |
|
430 | try: | |
431 | # NOTE(marcink): add extra validation so we skip invalid urls |
|
431 | # NOTE(marcink): add extra validation so we skip invalid urls | |
432 | # this is because these tasks can be executed via the scheduler without |
|
432 | # this is because these tasks can be executed via the scheduler without | |
433 | # proper validation of remote_uri |
|
433 | # proper validation of remote_uri | |
434 | if validate_uri: |
|
434 | if validate_uri: | |
435 | config = make_db_config(clear_session=False) |
|
435 | config = make_db_config(clear_session=False) | |
436 | url_validator(remote_uri, dbrepo.repo_type, config) |
|
436 | url_validator(remote_uri, dbrepo.repo_type, config) | |
437 | except InvalidCloneUrl: |
|
437 | except InvalidCloneUrl: | |
438 | raise |
|
438 | raise | |
439 |
|
439 | |||
440 | try: |
|
440 | try: | |
441 | repo.push(remote_uri) |
|
441 | repo.push(remote_uri) | |
442 | except Exception: |
|
442 | except Exception: | |
443 | log.error(traceback.format_exc()) |
|
443 | log.error(traceback.format_exc()) | |
444 | raise |
|
444 | raise | |
445 |
|
445 | |||
446 | def commit_change(self, repo, repo_name, commit, user, author, message, |
|
446 | def commit_change(self, repo, repo_name, commit, user, author, message, | |
447 | content, f_path): |
|
447 | content, f_path): | |
448 | """ |
|
448 | """ | |
449 | Commits changes |
|
449 | Commits changes | |
450 |
|
450 | |||
451 | :param repo: SCM instance |
|
451 | :param repo: SCM instance | |
452 |
|
452 | |||
453 | """ |
|
453 | """ | |
454 | user = self._get_user(user) |
|
454 | user = self._get_user(user) | |
455 |
|
455 | |||
456 | # decoding here ensures we have properly encoded values |
|
456 | # decoding here ensures we have properly encoded values | |
457 | # in any other case this will throw exceptions and deny the commit |
|
457 | # in any other case this will throw exceptions and deny the commit | |
458 | content = safe_str(content) |
|
458 | content = safe_str(content) | |
459 | path = safe_str(f_path) |
|
459 | path = safe_str(f_path) | |
460 | # message and author need to be unicode |
|
460 | # message and author need to be unicode | |
461 | # the proper backend should then translate that into the required type |
|
461 | # the proper backend should then translate that into the required type | |
462 | message = safe_unicode(message) |
|
462 | message = safe_unicode(message) | |
463 | author = safe_unicode(author) |
|
463 | author = safe_unicode(author) | |
464 | imc = repo.in_memory_commit |
|
464 | imc = repo.in_memory_commit | |
465 | imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path))) |
|
465 | imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path))) | |
466 | try: |
|
466 | try: | |
467 | # TODO: handle pre-push action ! |
|
467 | # TODO: handle pre-push action ! | |
468 | tip = imc.commit( |
|
468 | tip = imc.commit( | |
469 | message=message, author=author, parents=[commit], |
|
469 | message=message, author=author, parents=[commit], | |
470 | branch=commit.branch) |
|
470 | branch=commit.branch) | |
471 | except Exception as e: |
|
471 | except Exception as e: | |
472 | log.error(traceback.format_exc()) |
|
472 | log.error(traceback.format_exc()) | |
473 | raise IMCCommitError(str(e)) |
|
473 | raise IMCCommitError(str(e)) | |
474 | finally: |
|
474 | finally: | |
475 | # always clear caches; even if the commit fails we want a fresh object |
|
475 | # always clear caches; even if the commit fails we want a fresh object | |
476 | self.mark_for_invalidation(repo_name) |
|
476 | self.mark_for_invalidation(repo_name) | |
477 |
|
477 | |||
478 | # We trigger the post-push action |
|
478 | # We trigger the post-push action | |
479 | hooks_utils.trigger_post_push_hook( |
|
479 | hooks_utils.trigger_post_push_hook( | |
480 | username=user.username, action='push_local', hook_type='post_push', |
|
480 | username=user.username, action='push_local', hook_type='post_push', | |
481 | repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id]) |
|
481 | repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id]) | |
482 | return tip |
|
482 | return tip | |
483 |
|
483 | |||
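
The in-memory commit flow used by commit_change recurs in the create_nodes,
update_nodes and delete_nodes methods further down. Schematically (a hedged,
non-runnable sketch: `repo` stands for a vcs backend instance and `commit` for
an existing commit object, exactly as in the method above):

    # Stage a change against the backend's in-memory commit, then commit it.
    imc = repo.in_memory_commit
    imc.change(FileNode('docs/readme.rst', 'new content',
                        mode=commit.get_file_mode('docs/readme.rst')))
    tip = imc.commit(message=u'update readme',
                     author=u'Jane Doe <jane@example.com>',
                     parents=[commit], branch=commit.branch)
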
484 | def _sanitize_path(self, f_path): |
|
484 | def _sanitize_path(self, f_path): | |
485 | if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path: |
|
485 | if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path: | |
486 | raise NonRelativePathError('%s is not a relative path' % f_path) |
|
486 | raise NonRelativePathError('%s is not a relative path' % f_path) | |
487 | if f_path: |
|
487 | if f_path: | |
488 | f_path = os.path.normpath(f_path) |
|
488 | f_path = os.path.normpath(f_path) | |
489 | return f_path |
|
489 | return f_path | |
490 |
|
490 | |||
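
The check in _sanitize_path rejects absolute and parent-relative paths before
they reach the in-memory commit. A standalone sketch of the same rule (a
hypothetical helper mirroring the logic above, for illustration only):

    import os

    def sanitize_path(f_path):
        # mirror of _sanitize_path: refuse absolute or parent-relative paths
        if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
            raise ValueError('%s is not a relative path' % f_path)
        return os.path.normpath(f_path) if f_path else f_path

    sanitize_path('docs/readme.rst')   # -> 'docs/readme.rst'
    sanitize_path('../etc/passwd')     # raises ValueError
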
491 | def get_dirnode_metadata(self, request, commit, dir_node): |
|
491 | def get_dirnode_metadata(self, request, commit, dir_node): | |
492 | if not dir_node.is_dir(): |
|
492 | if not dir_node.is_dir(): | |
493 | return [] |
|
493 | return [] | |
494 |
|
494 | |||
495 | data = [] |
|
495 | data = [] | |
496 | for node in dir_node: |
|
496 | for node in dir_node: | |
497 | if not node.is_file(): |
|
497 | if not node.is_file(): | |
498 | # we only want file nodes here, skip everything else |
|
498 | # we only want file nodes here, skip everything else | |
499 | continue |
|
499 | continue | |
500 |
|
500 | |||
501 | last_commit = node.last_commit |
|
501 | last_commit = node.last_commit | |
502 | last_commit_date = last_commit.date |
|
502 | last_commit_date = last_commit.date | |
503 | data.append({ |
|
503 | data.append({ | |
504 | 'name': node.name, |
|
504 | 'name': node.name, | |
505 | 'size': h.format_byte_size_binary(node.size), |
|
505 | 'size': h.format_byte_size_binary(node.size), | |
506 | 'modified_at': h.format_date(last_commit_date), |
|
506 | 'modified_at': h.format_date(last_commit_date), | |
507 | 'modified_ts': last_commit_date.isoformat(), |
|
507 | 'modified_ts': last_commit_date.isoformat(), | |
508 | 'revision': last_commit.revision, |
|
508 | 'revision': last_commit.revision, | |
509 | 'short_id': last_commit.short_id, |
|
509 | 'short_id': last_commit.short_id, | |
510 | 'message': h.escape(last_commit.message), |
|
510 | 'message': h.escape(last_commit.message), | |
511 | 'author': h.escape(last_commit.author), |
|
511 | 'author': h.escape(last_commit.author), | |
512 | 'user_profile': h.gravatar_with_user( |
|
512 | 'user_profile': h.gravatar_with_user( | |
513 | request, last_commit.author), |
|
513 | request, last_commit.author), | |
514 | }) |
|
514 | }) | |
515 |
|
515 | |||
516 | return data |
|
516 | return data | |
517 |
|
517 | |||
518 | def get_nodes(self, repo_name, commit_id, root_path='/', flat=True, |
|
518 | def get_nodes(self, repo_name, commit_id, root_path='/', flat=True, | |
519 | extended_info=False, content=False, max_file_bytes=None): |
|
519 | extended_info=False, content=False, max_file_bytes=None): | |
520 | """ |
|
520 | """ | |
521 | recursive walk in root dir and return a set of all paths in that dir |
|
521 | recursive walk in root dir and return a set of all paths in that dir | |
522 | based on repository walk function |
|
522 | based on repository walk function | |
523 |
|
523 | |||
524 | :param repo_name: name of repository |
|
524 | :param repo_name: name of repository | |
525 | :param commit_id: commit id for which to list nodes |
|
525 | :param commit_id: commit id for which to list nodes | |
526 | :param root_path: root path to list |
|
526 | :param root_path: root path to list | |
527 | :param flat: return as a list, if False returns a dict with description |
|
527 | :param flat: return as a list, if False returns a dict with description | |
528 | :param extended_info: show additional info such as md5, binary, size etc |
|
528 | :param extended_info: show additional info such as md5, binary, size etc | |
529 | :param content: add nodes content to the return data |
|
529 | :param content: add nodes content to the return data | |
530 | :param max_file_bytes: will not return file contents over this limit |
|
530 | :param max_file_bytes: will not return file contents over this limit | |
531 |
|
531 | |||
532 | """ |
|
532 | """ | |
533 | _files = list() |
|
533 | _files = list() | |
534 | _dirs = list() |
|
534 | _dirs = list() | |
535 | try: |
|
535 | try: | |
536 | _repo = self._get_repo(repo_name) |
|
536 | _repo = self._get_repo(repo_name) | |
537 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) |
|
537 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) | |
538 | root_path = root_path.lstrip('/') |
|
538 | root_path = root_path.lstrip('/') | |
539 | for __, dirs, files in commit.walk(root_path): |
|
539 | for __, dirs, files in commit.walk(root_path): | |
540 |
|
540 | |||
541 | for f in files: |
|
541 | for f in files: | |
542 | _content = None |
|
542 | _content = None | |
543 | _data = f_name = f.unicode_path |
|
543 | _data = f_name = f.unicode_path | |
544 |
|
544 | |||
545 | if not flat: |
|
545 | if not flat: | |
546 | _data = { |
|
546 | _data = { | |
547 | "name": h.escape(f_name), |
|
547 | "name": h.escape(f_name), | |
548 | "type": "file", |
|
548 | "type": "file", | |
549 | } |
|
549 | } | |
550 | if extended_info: |
|
550 | if extended_info: | |
551 | _data.update({ |
|
551 | _data.update({ | |
552 | "md5": f.md5, |
|
552 | "md5": f.md5, | |
553 | "binary": f.is_binary, |
|
553 | "binary": f.is_binary, | |
554 | "size": f.size, |
|
554 | "size": f.size, | |
555 | "extension": f.extension, |
|
555 | "extension": f.extension, | |
556 | "mimetype": f.mimetype, |
|
556 | "mimetype": f.mimetype, | |
557 | "lines": f.lines()[0] |
|
557 | "lines": f.lines()[0] | |
558 | }) |
|
558 | }) | |
559 |
|
559 | |||
560 | if content: |
|
560 | if content: | |
561 | over_size_limit = (max_file_bytes is not None |
|
561 | over_size_limit = (max_file_bytes is not None | |
562 | and f.size > max_file_bytes) |
|
562 | and f.size > max_file_bytes) | |
563 | full_content = None |
|
563 | full_content = None | |
564 | if not f.is_binary and not over_size_limit: |
|
564 | if not f.is_binary and not over_size_limit: | |
565 | full_content = safe_str(f.content) |
|
565 | full_content = safe_str(f.content) | |
566 |
|
566 | |||
567 | _data.update({ |
|
567 | _data.update({ | |
568 | "content": full_content, |
|
568 | "content": full_content, | |
569 | }) |
|
569 | }) | |
570 | _files.append(_data) |
|
570 | _files.append(_data) | |
571 |
|
571 | |||
572 | for d in dirs: |
|
572 | for d in dirs: | |
573 | _data = d_name = d.unicode_path |
|
573 | _data = d_name = d.unicode_path | |
574 | if not flat: |
|
574 | if not flat: | |
575 | _data = { |
|
575 | _data = { | |
576 | "name": h.escape(d_name), |
|
576 | "name": h.escape(d_name), | |
577 | "type": "dir", |
|
577 | "type": "dir", | |
578 | } |
|
578 | } | |
579 | if extended_info: |
|
579 | if extended_info: | |
580 | _data.update({ |
|
580 | _data.update({ | |
581 | "md5": None, |
|
581 | "md5": None, | |
582 | "binary": None, |
|
582 | "binary": None, | |
583 | "size": None, |
|
583 | "size": None, | |
584 | "extension": None, |
|
584 | "extension": None, | |
585 | }) |
|
585 | }) | |
586 | if content: |
|
586 | if content: | |
587 | _data.update({ |
|
587 | _data.update({ | |
588 | "content": None |
|
588 | "content": None | |
589 | }) |
|
589 | }) | |
590 | _dirs.append(_data) |
|
590 | _dirs.append(_data) | |
591 | except RepositoryError: |
|
591 | except RepositoryError: | |
592 | log.exception("Exception in get_nodes") |
|
592 | log.exception("Exception in get_nodes") | |
593 | raise |
|
593 | raise | |
594 |
|
594 | |||
595 | return _dirs, _files |
|
595 | return _dirs, _files | |
596 |
|
596 | |||
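
A hedged usage sketch of get_nodes; the ScmModel class name, the repository
name and the commit id are illustrative assumptions:

    from rhodecode.model.scm import ScmModel

    dirs, files = ScmModel().get_nodes(
        'some-repo', commit_id='tip', root_path='/', flat=False,
        extended_info=True, content=False, max_file_bytes=1024 * 1024)
    # with flat=False each entry is a dict, roughly:
    # {'name': 'README.rst', 'type': 'file', 'md5': '...', 'binary': False,
    #  'size': 1024, 'extension': 'rst', 'mimetype': 'text/x-rst', 'lines': 40}
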
597 | def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'): |
|
597 | def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'): | |
598 | """ |
|
598 | """ | |
599 | Generate files for quick filter in files view |
|
599 | Generate files for quick filter in files view | |
600 | """ |
|
600 | """ | |
601 |
|
601 | |||
602 | _files = list() |
|
602 | _files = list() | |
603 | _dirs = list() |
|
603 | _dirs = list() | |
604 | try: |
|
604 | try: | |
605 | _repo = self._get_repo(repo_name) |
|
605 | _repo = self._get_repo(repo_name) | |
606 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) |
|
606 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) | |
607 | root_path = root_path.lstrip('/') |
|
607 | root_path = root_path.lstrip('/') | |
608 | for __, dirs, files in commit.walk(root_path): |
|
608 | for __, dirs, files in commit.walk(root_path): | |
609 |
|
609 | |||
610 | for f in files: |
|
610 | for f in files: | |
611 |
|
611 | |||
612 | _data = { |
|
612 | _data = { | |
613 | "name": h.escape(f.unicode_path), |
|
613 | "name": h.escape(f.unicode_path), | |
614 | "type": "file", |
|
614 | "type": "file", | |
615 | } |
|
615 | } | |
616 |
|
616 | |||
617 | _files.append(_data) |
|
617 | _files.append(_data) | |
618 |
|
618 | |||
619 | for d in dirs: |
|
619 | for d in dirs: | |
620 |
|
620 | |||
621 | _data = { |
|
621 | _data = { | |
622 | "name": h.escape(d.unicode_path), |
|
622 | "name": h.escape(d.unicode_path), | |
623 | "type": "dir", |
|
623 | "type": "dir", | |
624 | } |
|
624 | } | |
625 |
|
625 | |||
626 | _dirs.append(_data) |
|
626 | _dirs.append(_data) | |
627 | except RepositoryError: |
|
627 | except RepositoryError: | |
628 | log.exception("Exception in get_quick_filter_nodes") |
|
628 | log.exception("Exception in get_quick_filter_nodes") | |
629 | raise |
|
629 | raise | |
630 |
|
630 | |||
631 | return _dirs, _files |
|
631 | return _dirs, _files | |
632 |
|
632 | |||
633 | def get_node(self, repo_name, commit_id, file_path, |
|
633 | def get_node(self, repo_name, commit_id, file_path, | |
634 | extended_info=False, content=False, max_file_bytes=None, cache=True): |
|
634 | extended_info=False, content=False, max_file_bytes=None, cache=True): | |
635 | """ |
|
635 | """ | |
636 | retrieve single node from commit |
|
636 | retrieve single node from commit | |
637 | """ |
|
637 | """ | |
638 | try: |
|
638 | try: | |
639 |
|
639 | |||
640 | _repo = self._get_repo(repo_name) |
|
640 | _repo = self._get_repo(repo_name) | |
641 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) |
|
641 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) | |
642 |
|
642 | |||
643 | file_node = commit.get_node(file_path) |
|
643 | file_node = commit.get_node(file_path) | |
644 | if file_node.is_dir(): |
|
644 | if file_node.is_dir(): | |
645 | raise RepositoryError('The given path is a directory') |
|
645 | raise RepositoryError('The given path is a directory') | |
646 |
|
646 | |||
647 | _content = None |
|
647 | _content = None | |
648 | f_name = file_node.unicode_path |
|
648 | f_name = file_node.unicode_path | |
649 |
|
649 | |||
650 | file_data = { |
|
650 | file_data = { | |
651 | "name": h.escape(f_name), |
|
651 | "name": h.escape(f_name), | |
652 | "type": "file", |
|
652 | "type": "file", | |
653 | } |
|
653 | } | |
654 |
|
654 | |||
655 | if extended_info: |
|
655 | if extended_info: | |
656 | file_data.update({ |
|
656 | file_data.update({ | |
657 | "extension": file_node.extension, |
|
657 | "extension": file_node.extension, | |
658 | "mimetype": file_node.mimetype, |
|
658 | "mimetype": file_node.mimetype, | |
659 | }) |
|
659 | }) | |
660 |
|
660 | |||
661 | if cache: |
|
661 | if cache: | |
662 | md5 = file_node.md5 |
|
662 | md5 = file_node.md5 | |
663 | is_binary = file_node.is_binary |
|
663 | is_binary = file_node.is_binary | |
664 | size = file_node.size |
|
664 | size = file_node.size | |
665 | else: |
|
665 | else: | |
666 | is_binary, md5, size, _content = file_node.metadata_uncached() |
|
666 | is_binary, md5, size, _content = file_node.metadata_uncached() | |
667 |
|
667 | |||
668 | file_data.update({ |
|
668 | file_data.update({ | |
669 | "md5": md5, |
|
669 | "md5": md5, | |
670 | "binary": is_binary, |
|
670 | "binary": is_binary, | |
671 | "size": size, |
|
671 | "size": size, | |
672 | }) |
|
672 | }) | |
673 |
|
673 | |||
674 | if content and cache: |
|
674 | if content and cache: | |
675 | # get content + cache |
|
675 | # get content + cache | |
676 | size = file_node.size |
|
676 | size = file_node.size | |
677 | over_size_limit = (max_file_bytes is not None and size > max_file_bytes) |
|
677 | over_size_limit = (max_file_bytes is not None and size > max_file_bytes) | |
678 | full_content = None |
|
678 | full_content = None | |
679 | all_lines = 0 |
|
679 | all_lines = 0 | |
680 | if not file_node.is_binary and not over_size_limit: |
|
680 | if not file_node.is_binary and not over_size_limit: | |
681 | full_content = safe_unicode(file_node.content) |
|
681 | full_content = safe_unicode(file_node.content) | |
682 | all_lines, empty_lines = file_node.count_lines(full_content) |
|
682 | all_lines, empty_lines = file_node.count_lines(full_content) | |
683 |
|
683 | |||
684 | file_data.update({ |
|
684 | file_data.update({ | |
685 | "content": full_content, |
|
685 | "content": full_content, | |
686 | "lines": all_lines |
|
686 | "lines": all_lines | |
687 | }) |
|
687 | }) | |
688 | elif content: |
|
688 | elif content: | |
689 | # get content *without* cache |
|
689 | # get content *without* cache | |
690 | if _content is None: |
|
690 | if _content is None: | |
691 | is_binary, md5, size, _content = file_node.metadata_uncached() |
|
691 | is_binary, md5, size, _content = file_node.metadata_uncached() | |
692 |
|
692 | |||
693 | over_size_limit = (max_file_bytes is not None and size > max_file_bytes) |
|
693 | over_size_limit = (max_file_bytes is not None and size > max_file_bytes) | |
694 | full_content = None |
|
694 | full_content = None | |
695 | all_lines = 0 |
|
695 | all_lines = 0 | |
696 | if not is_binary and not over_size_limit: |
|
696 | if not is_binary and not over_size_limit: | |
697 | full_content = safe_unicode(_content) |
|
697 | full_content = safe_unicode(_content) | |
698 | all_lines, empty_lines = file_node.count_lines(full_content) |
|
698 | all_lines, empty_lines = file_node.count_lines(full_content) | |
699 |
|
699 | |||
700 | file_data.update({ |
|
700 | file_data.update({ | |
701 | "content": full_content, |
|
701 | "content": full_content, | |
702 | "lines": all_lines |
|
702 | "lines": all_lines | |
703 | }) |
|
703 | }) | |
704 |
|
704 | |||
705 | except RepositoryError: |
|
705 | except RepositoryError: | |
706 | log.exception("Exception in get_node") |
|
706 | log.exception("Exception in get_node") | |
707 | raise |
|
707 | raise | |
708 |
|
708 | |||
709 | return file_data |
|
709 | return file_data | |
710 |
|
710 | |||
711 | def get_fts_data(self, repo_name, commit_id, root_path='/'): |
|
711 | def get_fts_data(self, repo_name, commit_id, root_path='/'): | |
712 | """ |
|
712 | """ | |
713 | Fetch node tree for usage in full text search |
|
713 | Fetch node tree for usage in full text search | |
714 | """ |
|
714 | """ | |
715 |
|
715 | |||
716 | tree_info = list() |
|
716 | tree_info = list() | |
717 |
|
717 | |||
718 | try: |
|
718 | try: | |
719 | _repo = self._get_repo(repo_name) |
|
719 | _repo = self._get_repo(repo_name) | |
720 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) |
|
720 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) | |
721 | root_path = root_path.lstrip('/') |
|
721 | root_path = root_path.lstrip('/') | |
722 | for __, dirs, files in commit.walk(root_path): |
|
722 | for __, dirs, files in commit.walk(root_path): | |
723 |
|
723 | |||
724 | for f in files: |
|
724 | for f in files: | |
725 | is_binary, md5, size, _content = f.metadata_uncached() |
|
725 | is_binary, md5, size, _content = f.metadata_uncached() | |
726 | _data = { |
|
726 | _data = { | |
727 | "name": f.unicode_path, |
|
727 | "name": f.unicode_path, | |
728 | "md5": md5, |
|
728 | "md5": md5, | |
729 | "extension": f.extension, |
|
729 | "extension": f.extension, | |
730 | "binary": is_binary, |
|
730 | "binary": is_binary, | |
731 | "size": size |
|
731 | "size": size | |
732 | } |
|
732 | } | |
733 |
|
733 | |||
734 | tree_info.append(_data) |
|
734 | tree_info.append(_data) | |
735 |
|
735 | |||
736 | except RepositoryError: |
|
736 | except RepositoryError: | |
737 | log.exception("Exception in get_fts_data") |
|
737 | log.exception("Exception in get_fts_data") | |
738 | raise |
|
738 | raise | |
739 |
|
739 | |||
740 | return tree_info |
|
740 | return tree_info | |
741 |
|
741 | |||
742 | def create_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
742 | def create_nodes(self, user, repo, message, nodes, parent_commit=None, | |
743 | author=None, trigger_push_hook=True): |
|
743 | author=None, trigger_push_hook=True): | |
744 | """ |
|
744 | """ | |
745 | Commits given multiple nodes into repo |
|
745 | Commits given multiple nodes into repo | |
746 |
|
746 | |||
747 | :param user: RhodeCode User object or user_id, the committer |
|
747 | :param user: RhodeCode User object or user_id, the committer | |
748 | :param repo: RhodeCode Repository object |
|
748 | :param repo: RhodeCode Repository object | |
749 | :param message: commit message |
|
749 | :param message: commit message | |
750 | :param nodes: mapping {filename:{'content':content},...} |
|
750 | :param nodes: mapping {filename:{'content':content},...} | |
751 | :param parent_commit: parent commit, can be empty, then it's the |
|
751 | :param parent_commit: parent commit, can be empty, then it's the | |
752 | initial commit |
|
752 | initial commit | |
753 | :param author: author of commit, can be different than the committer, |
|
753 | :param author: author of commit, can be different than the committer, | |
754 | only for git |
|
754 | only for git | |
755 | :param trigger_push_hook: trigger push hooks |
|
755 | :param trigger_push_hook: trigger push hooks | |
756 |
|
756 | |||
757 | :returns: new commited commit |
|
757 | :returns: new committed commit | |
758 | """ |
|
758 | """ | |
759 |
|
759 | |||
760 | user = self._get_user(user) |
|
760 | user = self._get_user(user) | |
761 | scm_instance = repo.scm_instance(cache=False) |
|
761 | scm_instance = repo.scm_instance(cache=False) | |
762 |
|
762 | |||
763 | processed_nodes = [] |
|
763 | processed_nodes = [] | |
764 | for f_path in nodes: |
|
764 | for f_path in nodes: | |
765 | f_path = self._sanitize_path(f_path) |
|
765 | f_path = self._sanitize_path(f_path) | |
766 | content = nodes[f_path]['content'] |
|
766 | content = nodes[f_path]['content'] | |
767 | f_path = safe_str(f_path) |
|
767 | f_path = safe_str(f_path) | |
768 | # decoding here ensures we have properly encoded values |
|
768 | # decoding here ensures we have properly encoded values | |
769 | # in any other case this will throw exceptions and deny the commit |
|
769 | # in any other case this will throw exceptions and deny the commit | |
770 | if isinstance(content, (basestring,)): |
|
770 | if isinstance(content, (basestring,)): | |
771 | content = safe_str(content) |
|
771 | content = safe_str(content) | |
772 | elif isinstance(content, (file, cStringIO.OutputType,)): |
|
772 | elif isinstance(content, (file, cStringIO.OutputType,)): | |
773 | content = content.read() |
|
773 | content = content.read() | |
774 | else: |
|
774 | else: | |
775 | raise Exception('Content is of unrecognized type %s' % ( |
|
775 | raise Exception('Content is of unrecognized type %s' % ( | |
776 | type(content) |
|
776 | type(content) | |
777 | )) |
|
777 | )) | |
778 | processed_nodes.append((f_path, content)) |
|
778 | processed_nodes.append((f_path, content)) | |
779 |
|
779 | |||
780 | message = safe_unicode(message) |
|
780 | message = safe_unicode(message) | |
781 | commiter = user.full_contact |
|
781 | commiter = user.full_contact | |
782 | author = safe_unicode(author) if author else commiter |
|
782 | author = safe_unicode(author) if author else commiter | |
783 |
|
783 | |||
784 | imc = scm_instance.in_memory_commit |
|
784 | imc = scm_instance.in_memory_commit | |
785 |
|
785 | |||
786 | if not parent_commit: |
|
786 | if not parent_commit: | |
787 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
787 | parent_commit = EmptyCommit(alias=scm_instance.alias) | |
788 |
|
788 | |||
789 | if isinstance(parent_commit, EmptyCommit): |
|
789 | if isinstance(parent_commit, EmptyCommit): | |
790 | # EmptyCommit means we're editing an empty repository |
|
790 | # EmptyCommit means we're editing an empty repository | |
791 | parents = None |
|
791 | parents = None | |
792 | else: |
|
792 | else: | |
793 | parents = [parent_commit] |
|
793 | parents = [parent_commit] | |
794 | # add multiple nodes |
|
794 | # add multiple nodes | |
795 | for path, content in processed_nodes: |
|
795 | for path, content in processed_nodes: | |
796 | imc.add(FileNode(path, content=content)) |
|
796 | imc.add(FileNode(path, content=content)) | |
797 | # TODO: handle pre push scenario |
|
797 | # TODO: handle pre push scenario | |
798 | tip = imc.commit(message=message, |
|
798 | tip = imc.commit(message=message, | |
799 | author=author, |
|
799 | author=author, | |
800 | parents=parents, |
|
800 | parents=parents, | |
801 | branch=parent_commit.branch) |
|
801 | branch=parent_commit.branch) | |
802 |
|
802 | |||
803 | self.mark_for_invalidation(repo.repo_name) |
|
803 | self.mark_for_invalidation(repo.repo_name) | |
804 | if trigger_push_hook: |
|
804 | if trigger_push_hook: | |
805 | hooks_utils.trigger_post_push_hook( |
|
805 | hooks_utils.trigger_post_push_hook( | |
806 | username=user.username, action='push_local', |
|
806 | username=user.username, action='push_local', | |
807 | repo_name=repo.repo_name, repo_type=scm_instance.alias, |
|
807 | repo_name=repo.repo_name, repo_type=scm_instance.alias, | |
808 | hook_type='post_push', |
|
808 | hook_type='post_push', | |
809 | commit_ids=[tip.raw_id]) |
|
809 | commit_ids=[tip.raw_id]) | |
810 | return tip |
|
810 | return tip | |
811 |
|
811 | |||
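
The nodes mapping consumed by create_nodes follows the shape documented in its
docstring; an illustrative example (filenames and contents are made up):

    # Input for create_nodes(user, repo, message, nodes, ...)
    nodes = {
        'docs/index.rst': {'content': 'Welcome\n=======\n'},
        '.gitignore':     {'content': '*.pyc\n'},
    }
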
812 | def update_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
812 | def update_nodes(self, user, repo, message, nodes, parent_commit=None, | |
813 | author=None, trigger_push_hook=True): |
|
813 | author=None, trigger_push_hook=True): | |
814 | user = self._get_user(user) |
|
814 | user = self._get_user(user) | |
815 | scm_instance = repo.scm_instance(cache=False) |
|
815 | scm_instance = repo.scm_instance(cache=False) | |
816 |
|
816 | |||
817 | message = safe_unicode(message) |
|
817 | message = safe_unicode(message) | |
818 | commiter = user.full_contact |
|
818 | commiter = user.full_contact | |
819 | author = safe_unicode(author) if author else commiter |
|
819 | author = safe_unicode(author) if author else commiter | |
820 |
|
820 | |||
821 | imc = scm_instance.in_memory_commit |
|
821 | imc = scm_instance.in_memory_commit | |
822 |
|
822 | |||
823 | if not parent_commit: |
|
823 | if not parent_commit: | |
824 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
824 | parent_commit = EmptyCommit(alias=scm_instance.alias) | |
825 |
|
825 | |||
826 | if isinstance(parent_commit, EmptyCommit): |
|
826 | if isinstance(parent_commit, EmptyCommit): | |
827 | # EmptyCommit means we're editing an empty repository |
|
827 | # EmptyCommit means we're editing an empty repository | |
828 | parents = None |
|
828 | parents = None | |
829 | else: |
|
829 | else: | |
830 | parents = [parent_commit] |
|
830 | parents = [parent_commit] | |
831 |
|
831 | |||
832 | # add multiple nodes |
|
832 | # add multiple nodes | |
833 | for _filename, data in nodes.items(): |
|
833 | for _filename, data in nodes.items(): | |
834 | # new filename, can be renamed from the old one, also sanitize |
|
834 | # new filename, can be renamed from the old one, also sanitize | |
835 | # the path for any hack around relative paths like ../../ etc. |
|
835 | # the path for any hack around relative paths like ../../ etc. | |
836 | filename = self._sanitize_path(data['filename']) |
|
836 | filename = self._sanitize_path(data['filename']) | |
837 | old_filename = self._sanitize_path(_filename) |
|
837 | old_filename = self._sanitize_path(_filename) | |
838 | content = data['content'] |
|
838 | content = data['content'] | |
839 | file_mode = data.get('mode') |
|
839 | file_mode = data.get('mode') | |
840 | filenode = FileNode(old_filename, content=content, mode=file_mode) |
|
840 | filenode = FileNode(old_filename, content=content, mode=file_mode) | |
841 | op = data['op'] |
|
841 | op = data['op'] | |
842 | if op == 'add': |
|
842 | if op == 'add': | |
843 | imc.add(filenode) |
|
843 | imc.add(filenode) | |
844 | elif op == 'del': |
|
844 | elif op == 'del': | |
845 | imc.remove(filenode) |
|
845 | imc.remove(filenode) | |
846 | elif op == 'mod': |
|
846 | elif op == 'mod': | |
847 | if filename != old_filename: |
|
847 | if filename != old_filename: | |
848 | # TODO: handle renames more efficiently, needs vcs lib changes |
|
848 | # TODO: handle renames more efficiently, needs vcs lib changes | |
849 | imc.remove(filenode) |
|
849 | imc.remove(filenode) | |
850 | imc.add(FileNode(filename, content=content, mode=file_mode)) |
|
850 | imc.add(FileNode(filename, content=content, mode=file_mode)) | |
851 | else: |
|
851 | else: | |
852 | imc.change(filenode) |
|
852 | imc.change(filenode) | |
853 |
|
853 | |||
854 | try: |
|
854 | try: | |
855 | # TODO: handle pre push scenario commit changes |
|
855 | # TODO: handle pre push scenario commit changes | |
856 | tip = imc.commit(message=message, |
|
856 | tip = imc.commit(message=message, | |
857 | author=author, |
|
857 | author=author, | |
858 | parents=parents, |
|
858 | parents=parents, | |
859 | branch=parent_commit.branch) |
|
859 | branch=parent_commit.branch) | |
860 | except NodeNotChangedError: |
|
860 | except NodeNotChangedError: | |
861 | raise |
|
861 | raise | |
862 | except Exception as e: |
|
862 | except Exception as e: | |
863 | log.exception("Unexpected exception during call to imc.commit") |
|
863 | log.exception("Unexpected exception during call to imc.commit") | |
864 | raise IMCCommitError(str(e)) |
|
864 | raise IMCCommitError(str(e)) | |
865 | finally: |
|
865 | finally: | |
866 | # always clear caches; even if the commit fails we want a fresh object |
|
866 | # always clear caches; even if the commit fails we want a fresh object | |
867 | self.mark_for_invalidation(repo.repo_name) |
|
867 | self.mark_for_invalidation(repo.repo_name) | |
868 |
|
868 | |||
869 | if trigger_push_hook: |
|
869 | if trigger_push_hook: | |
870 | hooks_utils.trigger_post_push_hook( |
|
870 | hooks_utils.trigger_post_push_hook( | |
871 | username=user.username, action='push_local', hook_type='post_push', |
|
871 | username=user.username, action='push_local', hook_type='post_push', | |
872 | repo_name=repo.repo_name, repo_type=scm_instance.alias, |
|
872 | repo_name=repo.repo_name, repo_type=scm_instance.alias, | |
873 | commit_ids=[tip.raw_id]) |
|
873 | commit_ids=[tip.raw_id]) | |
874 |
|
874 | |||
875 | return tip |
|
875 | return tip | |
876 |
|
876 | |||
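
update_nodes expects a richer mapping than create_nodes: it is keyed by the old
filename and each value carries an operation flag, as consumed by the loop
above. An illustrative example (the mode value is just an example):

    # Input for update_nodes(user, repo, message, nodes, ...)
    nodes = {
        'old_name.txt': {                    # key: current (old) filename
            'filename': 'new_name.txt',      # new filename, rename on 'mod'
            'content': 'updated content\n',
            'op': 'mod',                     # one of 'add', 'del', 'mod'
            'mode': 0o100644,                # optional file mode
        },
    }
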
877 | def delete_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
877 | def delete_nodes(self, user, repo, message, nodes, parent_commit=None, | |
878 | author=None, trigger_push_hook=True): |
|
878 | author=None, trigger_push_hook=True): | |
879 | """ |
|
879 | """ | |
880 | Deletes given multiple nodes from `repo` |
|
880 | Deletes given multiple nodes from `repo` | |
881 |
|
881 | |||
882 | :param user: RhodeCode User object or user_id, the committer |
|
882 | :param user: RhodeCode User object or user_id, the committer | |
883 | :param repo: RhodeCode Repository object |
|
883 | :param repo: RhodeCode Repository object | |
884 | :param message: commit message |
|
884 | :param message: commit message | |
885 | :param nodes: mapping {filename:{'content':content},...} |
|
885 | :param nodes: mapping {filename:{'content':content},...} | |
886 | :param parent_commit: parent commit, can be empty, then it's the initial |
|
886 | :param parent_commit: parent commit, can be empty, then it's the initial | |
887 | commit |
|
887 | commit | |
888 | :param author: author of commit, can be different than the committer, only |
|
888 | :param author: author of commit, can be different than the committer, only | |
889 | for git |
|
889 | for git | |
890 | :param trigger_push_hook: trigger push hooks |
|
890 | :param trigger_push_hook: trigger push hooks | |
891 |
|
891 | |||
892 | :returns: new commit after deletion |
|
892 | :returns: new commit after deletion | |
893 | """ |
|
893 | """ | |
894 |
|
894 | |||
895 | user = self._get_user(user) |
|
895 | user = self._get_user(user) | |
896 | scm_instance = repo.scm_instance(cache=False) |
|
896 | scm_instance = repo.scm_instance(cache=False) | |
897 |
|
897 | |||
898 | processed_nodes = [] |
|
898 | processed_nodes = [] | |
899 | for f_path in nodes: |
|
899 | for f_path in nodes: | |
900 | f_path = self._sanitize_path(f_path) |
|
900 | f_path = self._sanitize_path(f_path) | |
901 | # content can be empty but for compatibility it allows the same dict |
|
901 | # content can be empty but for compatibility it allows the same dict | |
902 | # structure as add_nodes |
|
902 | # structure as add_nodes | |
903 | content = nodes[f_path].get('content') |
|
903 | content = nodes[f_path].get('content') | |
904 | processed_nodes.append((f_path, content)) |
|
904 | processed_nodes.append((f_path, content)) | |
905 |
|
905 | |||
906 | message = safe_unicode(message) |
|
906 | message = safe_unicode(message) | |
907 | commiter = user.full_contact |
|
907 | commiter = user.full_contact | |
908 | author = safe_unicode(author) if author else commiter |
|
908 | author = safe_unicode(author) if author else commiter | |
909 |
|
909 | |||
910 | imc = scm_instance.in_memory_commit |
|
910 | imc = scm_instance.in_memory_commit | |
911 |
|
911 | |||
912 | if not parent_commit: |
|
912 | if not parent_commit: | |
913 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
913 | parent_commit = EmptyCommit(alias=scm_instance.alias) | |
914 |
|
914 | |||
915 | if isinstance(parent_commit, EmptyCommit): |
|
915 | if isinstance(parent_commit, EmptyCommit): | |
916 | # EmptyCommit means we're editing an empty repository |
|
916 | # EmptyCommit means we're editing an empty repository | |
917 | parents = None |
|
917 | parents = None | |
918 | else: |
|
918 | else: | |
919 | parents = [parent_commit] |
|
919 | parents = [parent_commit] | |
920 | # add multiple nodes |
|
920 | # add multiple nodes | |
921 | for path, content in processed_nodes: |
|
921 | for path, content in processed_nodes: | |
922 | imc.remove(FileNode(path, content=content)) |
|
922 | imc.remove(FileNode(path, content=content)) | |
923 |
|
923 | |||
924 | # TODO: handle pre push scenario |
|
924 | # TODO: handle pre push scenario | |
925 | tip = imc.commit(message=message, |
|
925 | tip = imc.commit(message=message, | |
926 | author=author, |
|
926 | author=author, | |
927 | parents=parents, |
|
927 | parents=parents, | |
928 | branch=parent_commit.branch) |
|
928 | branch=parent_commit.branch) | |
929 |
|
929 | |||
930 | self.mark_for_invalidation(repo.repo_name) |
|
930 | self.mark_for_invalidation(repo.repo_name) | |
931 | if trigger_push_hook: |
|
931 | if trigger_push_hook: | |
932 | hooks_utils.trigger_post_push_hook( |
|
932 | hooks_utils.trigger_post_push_hook( | |
933 | username=user.username, action='push_local', hook_type='post_push', |
|
933 | username=user.username, action='push_local', hook_type='post_push', | |
934 | repo_name=repo.repo_name, repo_type=scm_instance.alias, |
|
934 | repo_name=repo.repo_name, repo_type=scm_instance.alias, | |
935 | commit_ids=[tip.raw_id]) |
|
935 | commit_ids=[tip.raw_id]) | |
936 | return tip |
|
936 | return tip | |
937 |
|
937 | |||
938 | def strip(self, repo, commit_id, branch): |
|
938 | def strip(self, repo, commit_id, branch): | |
939 | scm_instance = repo.scm_instance(cache=False) |
|
939 | scm_instance = repo.scm_instance(cache=False) | |
940 | scm_instance.config.clear_section('hooks') |
|
940 | scm_instance.config.clear_section('hooks') | |
941 | scm_instance.strip(commit_id, branch) |
|
941 | scm_instance.strip(commit_id, branch) | |
942 | self.mark_for_invalidation(repo.repo_name) |
|
942 | self.mark_for_invalidation(repo.repo_name) | |
943 |
|
943 | |||
944 | def get_unread_journal(self): |
|
944 | def get_unread_journal(self): | |
945 | return self.sa.query(UserLog).count() |
|
945 | return self.sa.query(UserLog).count() | |
946 |
|
946 | |||
947 | @classmethod |
|
947 | @classmethod | |
948 | def backend_landing_ref(cls, repo_type): |
|
948 | def backend_landing_ref(cls, repo_type): | |
949 | """ |
|
949 | """ | |
950 | Return a default landing ref based on a repository type. |
|
950 | Return a default landing ref based on a repository type. | |
951 | """ |
|
951 | """ | |
952 |
|
952 | |||
953 | landing_ref = { |
|
953 | landing_ref = { | |
954 | 'hg': ('branch:default', 'default'), |
|
954 | 'hg': ('branch:default', 'default'), | |
955 | 'git': ('branch:master', 'master'), |
|
955 | 'git': ('branch:master', 'master'), | |
956 | 'svn': ('rev:tip', 'latest tip'), |
|
956 | 'svn': ('rev:tip', 'latest tip'), | |
957 | 'default': ('rev:tip', 'latest tip'), |
|
957 | 'default': ('rev:tip', 'latest tip'), | |
958 | } |
|
958 | } | |
959 |
|
959 | |||
960 | return landing_ref.get(repo_type) or landing_ref['default'] |
|
960 | return landing_ref.get(repo_type) or landing_ref['default'] | |
961 |
|
961 | |||
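
backend_landing_ref returns a (ref, label) tuple and falls back to the
'default' entry for unknown repository types; assuming the enclosing class is
ScmModel:

    ScmModel.backend_landing_ref('hg')    # -> ('branch:default', 'default')
    ScmModel.backend_landing_ref('git')   # -> ('branch:master', 'master')
    ScmModel.backend_landing_ref('foo')   # -> ('rev:tip', 'latest tip')
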
962 | def get_repo_landing_revs(self, translator, repo=None): |
|
962 | def get_repo_landing_revs(self, translator, repo=None): | |
963 | """ |
|
963 | """ | |
964 | Generates select options with tags, branches and bookmarks (for hg only) |
|
964 | Generates select options with tags, branches and bookmarks (for hg only) | |
965 | grouped by type |
|
965 | grouped by type | |
966 |
|
966 | |||
967 | :param repo: |
|
967 | :param repo: | |
968 | """ |
|
968 | """ | |
969 | from rhodecode.lib.vcs.backends.git import GitRepository |
|
969 | from rhodecode.lib.vcs.backends.git import GitRepository | |
970 |
|
970 | |||
971 | _ = translator |
|
971 | _ = translator | |
972 | repo = self._get_repo(repo) |
|
972 | repo = self._get_repo(repo) | |
973 |
|
973 | |||
974 | if repo: |
|
974 | if repo: | |
975 | repo_type = repo.repo_type |
|
975 | repo_type = repo.repo_type | |
976 | else: |
|
976 | else: | |
977 | repo_type = 'default' |
|
977 | repo_type = 'default' | |
978 |
|
978 | |||
979 | default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type) |
|
979 | default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type) | |
980 |
|
980 | |||
981 | default_ref_options = [ |
|
981 | default_ref_options = [ | |
982 | [default_landing_ref, landing_ref_lbl] |
|
982 | [default_landing_ref, landing_ref_lbl] | |
983 | ] |
|
983 | ] | |
984 | default_choices = [ |
|
984 | default_choices = [ | |
985 | default_landing_ref |
|
985 | default_landing_ref | |
986 | ] |
|
986 | ] | |
987 |
|
987 | |||
988 | if not repo: |
|
988 | if not repo: | |
989 | # presented at NEW repo creation |
|
989 | # presented at NEW repo creation | |
990 | return default_choices, default_ref_options |
|
990 | return default_choices, default_ref_options | |
991 |
|
991 | |||
992 | repo = repo.scm_instance() |
|
992 | repo = repo.scm_instance() | |
993 |
|
993 | |||
994 | ref_options = [(default_landing_ref, landing_ref_lbl)] |
|
994 | ref_options = [(default_landing_ref, landing_ref_lbl)] | |
995 | choices = [default_landing_ref] |
|
995 | choices = [default_landing_ref] | |
996 |
|
996 | |||
997 | # branches |
|
997 | # branches | |
998 | branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches] |
|
998 | branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches] | |
999 | if not branch_group: |
|
999 | if not branch_group: | |
1000 | # new repo, or maybe one without a branch? |
|
1000 | # new repo, or maybe one without a branch? | |
1001 | branch_group = default_ref_options |
|
1001 | branch_group = default_ref_options | |
1002 |
|
1002 | |||
1003 | branches_group = (branch_group, _("Branches")) |
|
1003 | branches_group = (branch_group, _("Branches")) | |
1004 | ref_options.append(branches_group) |
|
1004 | ref_options.append(branches_group) | |
1005 | choices.extend([x[0] for x in branches_group[0]]) |
|
1005 | choices.extend([x[0] for x in branches_group[0]]) | |
1006 |
|
1006 | |||
1007 | # bookmarks for HG |
|
1007 | # bookmarks for HG | |
1008 | if repo.alias == 'hg': |
|
1008 | if repo.alias == 'hg': | |
1009 | bookmarks_group = ( |
|
1009 | bookmarks_group = ( | |
1010 | [(u'book:%s' % safe_unicode(b), safe_unicode(b)) |
|
1010 | [(u'book:%s' % safe_unicode(b), safe_unicode(b)) | |
1011 | for b in repo.bookmarks], |
|
1011 | for b in repo.bookmarks], | |
1012 | _("Bookmarks")) |
|
1012 | _("Bookmarks")) | |
1013 | ref_options.append(bookmarks_group) |
|
1013 | ref_options.append(bookmarks_group) | |
1014 | choices.extend([x[0] for x in bookmarks_group[0]]) |
|
1014 | choices.extend([x[0] for x in bookmarks_group[0]]) | |
1015 |
|
1015 | |||
1016 | # tags |
|
1016 | # tags | |
1017 | tags_group = ( |
|
1017 | tags_group = ( | |
1018 | [(u'tag:%s' % safe_unicode(t), safe_unicode(t)) |
|
1018 | [(u'tag:%s' % safe_unicode(t), safe_unicode(t)) | |
1019 | for t in repo.tags], |
|
1019 | for t in repo.tags], | |
1020 | _("Tags")) |
|
1020 | _("Tags")) | |
1021 | ref_options.append(tags_group) |
|
1021 | ref_options.append(tags_group) | |
1022 | choices.extend([x[0] for x in tags_group[0]]) |
|
1022 | choices.extend([x[0] for x in tags_group[0]]) | |
1023 |
|
1023 | |||
1024 | return choices, ref_options |
|
1024 | return choices, ref_options | |
1025 |
|
1025 | |||
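
get_repo_landing_revs therefore returns a flat list of valid ref choices plus
grouped options suitable for a select widget. An illustrative shape for an hg
repository (branch, bookmark and tag names are made up):

    ref_options = [
        ('branch:default', 'default'),                 # default landing ref
        ([('branch:default', 'default'),
          ('branch:stable', 'stable')], 'Branches'),
        ([('book:feature-x', 'feature-x')], 'Bookmarks'),
        ([('tag:v1.0', 'v1.0')], 'Tags'),
    ]
    # choices is the flat list of the ref strings above, starting with the
    # default landing ref.
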
1026 | def get_server_info(self, environ=None): |
|
1026 | def get_server_info(self, environ=None): | |
1027 | server_info = get_system_info(environ) |
|
1027 | server_info = get_system_info(environ) | |
1028 | return server_info |
|
1028 | return server_info |
@@ -1,38 +1,38 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | ${_('%(user)s commited on %(date)s UTC') % { |
|
3 | ${_('%(user)s committed on %(date)s UTC') % { | |
4 | 'user': h.person(commit.author), |
|
4 | 'user': h.person(commit.author), | |
5 | 'date': h.format_date(commit.date) |
|
5 | 'date': h.format_date(commit.date) | |
6 | }} |
|
6 | }} | |
7 | <br/> |
|
7 | <br/> | |
8 | % if commit.branch: |
|
8 | % if commit.branch: | |
9 | branch: ${commit.branch} <br/> |
|
9 | branch: ${commit.branch} <br/> | |
10 | % endif |
|
10 | % endif | |
11 |
|
11 | |||
12 | % for bookmark in getattr(commit, 'bookmarks', []): |
|
12 | % for bookmark in getattr(commit, 'bookmarks', []): | |
13 | bookmark: ${bookmark} <br/> |
|
13 | bookmark: ${bookmark} <br/> | |
14 | % endfor |
|
14 | % endfor | |
15 |
|
15 | |||
16 | % for tag in commit.tags: |
|
16 | % for tag in commit.tags: | |
17 | tag: ${tag} <br/> |
|
17 | tag: ${tag} <br/> | |
18 | % endfor |
|
18 | % endfor | |
19 |
|
19 | |||
20 | % if has_hidden_changes: |
|
20 | % if has_hidden_changes: | |
21 | Has hidden changes<br/> |
|
21 | Has hidden changes<br/> | |
22 | % endif |
|
22 | % endif | |
23 |
|
23 | |||
24 | commit: <a href="${h.route_url('repo_commit', repo_name=c.rhodecode_db_repo.repo_name, commit_id=commit.raw_id)}">${h.show_id(commit)}</a> |
|
24 | commit: <a href="${h.route_url('repo_commit', repo_name=c.rhodecode_db_repo.repo_name, commit_id=commit.raw_id)}">${h.show_id(commit)}</a> | |
25 | <pre> |
|
25 | <pre> | |
26 | ${h.urlify_commit_message(commit.message)} |
|
26 | ${h.urlify_commit_message(commit.message)} | |
27 |
|
27 | |||
28 | % for change in parsed_diff: |
|
28 | % for change in parsed_diff: | |
29 | % if limited_diff: |
|
29 | % if limited_diff: | |
30 | ${_('Commit was too big and was cut off...')} |
|
30 | ${_('Commit was too big and was cut off...')} | |
31 | % endif |
|
31 | % endif | |
32 | ${change['operation']} ${change['filename']} ${'(%(added)s lines added, %(removed)s lines removed)' % {'added': change['stats']['added'], 'removed': change['stats']['deleted']}} |
|
32 | ${change['operation']} ${change['filename']} ${'(%(added)s lines added, %(removed)s lines removed)' % {'added': change['stats']['added'], 'removed': change['stats']['deleted']}} | |
33 | % endfor |
|
33 | % endfor | |
34 |
|
34 | |||
35 | % if feed_include_diff: |
|
35 | % if feed_include_diff: | |
36 | ${c.path_filter.get_raw_patch(diff_processor)} |
|
36 | ${c.path_filter.get_raw_patch(diff_processor)} | |
37 | % endif |
|
37 | % endif | |
38 | </pre> |
|
38 | </pre> |
@@ -1,1840 +1,1842 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import collections
import datetime
import hashlib
import os
import re
import pprint
import shutil
import socket
import subprocess32
import time
import uuid
import dateutil.tz
import logging

import mock
import pyramid.testing
import pytest
import colander
import requests
import pyramid.paster

import rhodecode
from rhodecode.lib.utils2 import AttributeDict
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.comment import CommentsModel
from rhodecode.model.db import (
    PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
    RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
from rhodecode.model.meta import Session
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.repo import RepoModel
from rhodecode.model.repo_group import RepoGroupModel
from rhodecode.model.user import UserModel
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.user_group import UserGroupModel
from rhodecode.model.integration import IntegrationModel
from rhodecode.integrations import integration_type_registry
from rhodecode.integrations.types.base import IntegrationTypeBase
from rhodecode.lib.utils import repo2db_mapper
from rhodecode.lib.vcs.backends import get_backend
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.tests import (
    login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
    TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
    TEST_USER_REGULAR_PASS)
from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
from rhodecode.tests.fixture import Fixture
from rhodecode.config import utils as config_utils

log = logging.getLogger(__name__)


def _split_comma(value):
    return value.split(',')


def pytest_addoption(parser):
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database specific tests. "
             "Possible options are sqlite,postgres,mysql")
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    parser.addoption(
        '--sqlite-connection-string', action='store',
        default='', help="Connection string for the dbs tests with SQLite")
    parser.addoption(
        '--postgres-connection-string', action='store',
        default='', help="Connection string for the dbs tests with Postgres")
    parser.addoption(
        '--mysql-connection-string', action='store',
        default='', help="Connection string for the dbs tests with MySQL")
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")


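# Illustrative sketch (not part of the original conftest): the comma separated
# options above are parsed by ``_split_comma``, so a run such as
# ``py.test --backends=git,hg --dbs=sqlite,postgres`` (hypothetical invocation)
# hands plain Python lists to the option consumers below.
assert _split_comma('git,hg') == ['git', 'hg']
assert _split_comma('sqlite,postgres,mysql') == ['sqlite', 'postgres', 'mysql']

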
def pytest_configure(config):
    from rhodecode.config import patches


def pytest_collection_modifyitems(session, config, items):
    # drop items marked with ``__test__ = False`` (nose-style), kept for the
    # transition from nose to pytest
    remaining = [
        i for i in items if getattr(i.obj, '__test__', True)]
    items[:] = remaining

    # NOTE(marcink): custom test ordering, db tests and vcs tests are the
    # slowest and should be executed at the end for faster test feedback
    def sorter(item):
        pos = 0
        key = item._nodeid
        if key.startswith('rhodecode/tests/database'):
            pos = 1
        elif key.startswith('rhodecode/tests/vcs_operations'):
            pos = 2

        return pos

    items.sort(key=sorter)


def pytest_generate_tests(metafunc):

    # Support test generation based on the --backends parameter
    if 'backend_alias' in metafunc.fixturenames:
        backends = get_backends_from_metafunc(metafunc)
        scope = None
        if not backends:
            pytest.skip("Not enabled for any of selected backends")

        metafunc.parametrize('backend_alias', backends, scope=scope)

    backend_mark = metafunc.definition.get_closest_marker('backends')
    if backend_mark:
        backends = get_backends_from_metafunc(metafunc)
        if not backends:
            pytest.skip("Not enabled for any of selected backends")


def get_backends_from_metafunc(metafunc):
    requested_backends = set(metafunc.config.getoption('--backends'))
    backend_mark = metafunc.definition.get_closest_marker('backends')
    if backend_mark:
        # Supported backends by this test function, created from
        # pytest.mark.backends
        backends = backend_mark.args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Support class attribute "backend_alias", this is mainly
        # for legacy reasons for tests not yet using pytest.mark.backends
        backends = [metafunc.cls.backend_alias]
    else:
        backends = metafunc.config.getoption('--backends')
    return requested_backends.intersection(backends)


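# Hedged usage sketch (the test name below is made up): a test restricted via
# the ``backends`` marker is only generated for the intersection of the marker
# arguments and the ``--backends`` command line selection.
@pytest.mark.backends("git", "hg")
def test_backends_marker_sketch(backend):
    assert backend.alias in ("git", "hg")

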
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.config import rcextensions

    old_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = rcextensions
    rhodecode.EXTENSIONS.calls = collections.defaultdict(list)

    @request.addfinalizer
    def cleanup():
        rhodecode.EXTENSIONS = old_extensions


@pytest.fixture()
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # Note: At this moment, it is still the empty dict, but that will
    # be filled during the test run and since it is a reference this
    # is enough to make it work.
    return calls


@pytest.fixture(scope='session')
def http_environ_session():
    """
    Allows using "http_environ" in session scope.
    """
    return plain_http_environ()


def plain_http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    return 'example.com:80'


@pytest.fixture()
def http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    return plain_http_host_stub()


def plain_http_host_only_stub():
    """
    Value of HTTP_HOST in the test run (host part only).
    """
    return plain_http_host_stub().split(':')[0]


@pytest.fixture()
def http_host_only_stub():
    """
    Value of HTTP_HOST in the test run (host part only).
    """
    return plain_http_host_only_stub()


def plain_http_environ():
    """
    HTTP extra environ keys.

    Used by the test application as well as for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return {
        'SERVER_NAME': plain_http_host_only_stub(),
        'SERVER_PORT': plain_http_host_stub().split(':')[1],
        'HTTP_HOST': plain_http_host_stub(),
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET'
    }


@pytest.fixture()
def http_environ():
    """
    HTTP extra environ keys.

    Used by the test application as well as for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return plain_http_environ()


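# Minimal illustrative check (not part of the original module): with the stub
# host above, ``plain_http_environ()`` evaluates to a WSGI-style environ with
# SERVER_NAME='example.com', SERVER_PORT='80' and HTTP_HOST='example.com:80'.
assert plain_http_environ()['HTTP_HOST'] == 'example.com:80'
assert plain_http_environ()['SERVER_PORT'] == '80'

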
@pytest.fixture(scope='session')
def baseapp(ini_config, vcsserver, http_environ_session):
    from rhodecode.lib.pyramid_utils import get_app_config
    from rhodecode.config.middleware import make_pyramid_app

    log.info("Using the RhodeCode configuration: {}".format(ini_config))
    pyramid.paster.setup_logging(ini_config)

    settings = get_app_config(ini_config)
    app = make_pyramid_app({'__file__': ini_config}, **settings)

    return app


@pytest.fixture(scope='function')
def app(request, config_stub, baseapp, http_environ):
    app = CustomTestApp(
        baseapp,
        extra_environ=http_environ)
    if request.cls:
        request.cls.app = app
    return app


@pytest.fixture(scope='session')
def app_settings(baseapp, ini_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    return baseapp.config.get_settings()


@pytest.fixture(scope='session')
def db_connection(ini_settings):
    # Initialize the database connection.
    config_utils.initialize_database(ini_settings)


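# Hedged sketch (not part of the original module, test name is made up):
# ``app_settings`` exposes the parsed ini settings; for example it contains the
# search index location that the ``index_location`` fixture further down reads.
def test_app_settings_sketch(app_settings):
    assert 'search.location' in app_settings

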
LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))


def _autologin_user(app, *args):
    session = login_user_session(app, *args)
    csrf_token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(csrf_token, session['rhodecode_user'])


@pytest.fixture()
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    return _autologin_user(app)


@pytest.fixture()
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)


@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    return autologin_user.csrf_token


@pytest.fixture(scope='function')
def xhr_header(request):
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}


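# Illustrative sketch (test name is made up, the fixtures are the ones above):
# ``autologin_user`` logs the admin in and hands back the csrf token together
# with the session user, which is what form POSTs in functional tests need.
def test_autologin_sketch(app, autologin_user, xhr_header):
    assert autologin_user.csrf_token
    # the xhr_header dict can be merged into request headers for AJAX views
    assert xhr_header['HTTP_X_REQUESTED_WITH'] == 'XMLHttpRequest'

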
@pytest.fixture()
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)


@pytest.fixture(scope='class')
def index_location(request, baseapp):
    index_location = baseapp.config.get_settings()['search.location']
    if request.cls:
        request.cls.index_location = index_location
    return index_location


@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    if not request.config.getoption('--keep-tmp-path'):
        @request.addfinalizer
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

    return TESTS_TMP_PATH


@pytest.fixture()
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(repogroupid)

    def _cleanup():
        fixture.destroy_repo_group(repogroupid)

    request.addfinalizer(_cleanup)
    return repo_group


@pytest.fixture()
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(usergroupid)

    def _cleanup():
        fixture.destroy_user_group(user_group)

    request.addfinalizer(_cleanup)
    return user_group


@pytest.fixture(scope='session')
def test_repo(request):
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container


class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestRepoContainer()
        repo = test_repo('minimal', 'svn')

    """

    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)

        vcs_repo = backend(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)


def backend_base(request, backend_alias, baseapp, test_repo):
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = Backend(
        alias=backend_alias,
        repo_name=repo_name,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend


@pytest.fixture()
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)


@pytest.fixture()
def backend_git(request, baseapp, test_repo):
    return backend_base(request, 'git', baseapp, test_repo)


@pytest.fixture()
def backend_hg(request, baseapp, test_repo):
    return backend_base(request, 'hg', baseapp, test_repo)


@pytest.fixture()
def backend_svn(request, baseapp, test_repo):
    return backend_base(request, 'svn', baseapp, test_repo)


@pytest.fixture()
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend".

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git


@pytest.fixture()
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git


@pytest.fixture()
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()


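# Hedged usage sketch (test name is made up): the fixtures above hand a
# per-test ``Backend`` helper to the test; repositories created through it are
# destroyed by the finalizer registered in ``backend_base``.
def test_backend_create_repo_sketch(backend_hg):
    repo = backend_hg.create_repo(number_of_commits=2)
    # two synthetic commits were added by the helper
    assert repo.scm_instance().count() == 2

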
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    _master_repo = None
    _master_repo_path = ''
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the commit ids for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This makes it easy to create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        self._master_repo_path = self._master_repo.repo_full_path

        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsrepo = repo.scm_instance()
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)

    def create_fork(self):
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])


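# Illustrative sketch of the master-repo pattern used by diff/compare/pull
# request tests (test name and commit messages are made up): commits are
# addressed by message, and derived repositories pull a subset via ``heads``.
def test_master_repo_sketch(backend_git):
    commit_map = backend_git.create_master_repo([
        {'message': 'feature-a'},
        {'message': 'feature-b'},
    ])
    # derived repository that only pulls in the first commit
    backend_git.create_repo(heads=['feature-a'])
    assert set(commit_map) == {'feature-a', 'feature-b'}

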
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    repo_path = os.path.join(tests_tmp_path, repo_name)
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=repo_path,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend


@pytest.fixture()
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git


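# Hedged usage sketch (test name and file name are made up): ``vcsbackend_git``
# works directly on the vcs layer, without RhodeCode model instances.
def test_vcsbackend_sketch(vcsbackend_git):
    vcs_repo = vcsbackend_git.create_repo(number_of_commits=1)
    vcsbackend_git.add_file(vcs_repo, 'README.rst', 'content\n')
    # one generated commit plus the commit created by add_file
    assert vcs_repo.count() == 2

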
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic <automatic@rhodecode.com>')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)


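# Illustrative sketch of how a test can use the vcsbackend fixtures and the
# VcsBackend helper above. The test name and file name are made up; only
# methods defined on VcsBackend are used:
#
#     def test_example(vcsbackend):
#         repo = vcsbackend.create_repo(number_of_commits=2)
#         vcsbackend.ensure_file('some_file.txt', content='example\n')
#         assert len(repo.commit_ids) >= 2

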
def _add_commits_to_repo(vcs_repo, commits):
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = unicode(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids


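# Illustrative shape of the `commits` argument consumed by
# _add_commits_to_repo() and VcsBackend.create_repo() above. The messages and
# file contents are made up; the nodes are FileNode instances, matching the
# attributes (`path`, `content`) read in the loop above:
#
#     commits = [
#         {'message': 'Add README',
#          'added': [FileNode('README.rst', content='docs\n')]},
#         {'message': 'Update README',
#          'changed': [FileNode('README.rst', content='more docs\n')]},
#     ]
#     vcsbackend.create_repo(commits=commits)

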
@pytest.fixture()
def reposerver(request):
    """
    Allows serving a backend repository.
    """

    repo_server = RepoServer()
    request.addfinalizer(repo_server.cleanup)
    return repo_server


class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        for proc in self._cleanup_servers:
            proc.terminate()


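# Illustrative use of the `reposerver` fixture together with a Subversion
# backend. A sketch only: it assumes `svnserve` is available on the test
# machine, and the test body is made up:
#
#     def test_example(vcsbackend_svn, reposerver):
#         repo = vcsbackend_svn.create_repo(number_of_commits=1)
#         reposerver.serve(repo)
#         # the repository is now reachable below reposerver.url ('svn://localhost')

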
@pytest.fixture()
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """

    util = PRTestUtility(backend)
    request.addfinalizer(util.cleanup)

    return util


class PRTestUtility(object):

    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None, observers=None,
            title=u"Test", description=u"Description"):
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'observers': observers or self._get_observers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        role = PullRequestReviewers.ROLE_REVIEWER
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
        ]

    def _get_observers(self):
        return [

        ]

    def update_source_repository(self, head=None):
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()


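# Illustrative use of the `pr_util` fixture above. A sketch only: the test
# body is made up and relies solely on methods defined on PRTestUtility:
#
#     def test_example(pr_util):
#         pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
#         pr_util.create_comment()
#         pr_util.add_one_commit()
#         pr_util.close()

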
@pytest.fixture()
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
    return user


@pytest.fixture()
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
    return user


@pytest.fixture()
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(utility.cleanup)
    return utility


# TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)


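# Illustrative use of the `user_util` fixture above. A sketch only: the test
# body is made up, and the permission name is an assumption based on
# RhodeCode's standard permission keys:
#
#     def test_example(user_util):
#         user = user_util.create_user()
#         repo = user_util.create_repo(repo_type='git')
#         user_util.grant_user_permission_to_repo(repo, user, 'repository.read')

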
# TODO: Think about moving this into a pytest-pyro package and make it a
# pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adds the remote traceback if the exception has this information.

    VCSServer attaches this information as the attribute `_vcs_server_traceback`
    to the exception instance.
    """
    outcome = yield
    report = outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(report, call.excinfo.value)


def _add_vcsserver_remote_traceback(report, exc):
    vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)

    if vcsserver_traceback:
        section = 'VCSServer remote traceback ' + report.when
        report.sections.append((section, vcsserver_traceback))


@pytest.fixture(scope='session')
def testrun():
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }


class AppenlightClient(object):

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        self.tags_after[tag] = value

    def collect(self, data):
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if not response.status_code == 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')


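# Illustrative use of AppenlightClient. A sketch only: the URL and API key are
# made up, and `testrun` refers to the session fixture defined above (its
# 'start' and 'timestamp' keys are required by send_stats()):
#
#     client = AppenlightClient(
#         url='https://appenlight.example.com/api/general',
#         api_key='SOME-API-KEY',
#         namespace='test_example',
#         testrun=testrun)
#     client.tag_before('duration', 10)
#     client.tag_after('duration', 15)   # send_stats() also reports the delta
#     client.send_stats()

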
@pytest.fixture()
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    utility = GistUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class GistUtility(object):
    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(gist.gist_id)
        return gist

    def cleanup(self):
        for id_ in self.gist_ids:
            self.fixture.destroy_gists(str(id_))


@pytest.fixture()
def enabled_backends(request):
    backends = request.config.option.backends
    return backends[:]


@pytest.fixture()
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    utility = SettingsUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class SettingsUtility(object):
    def __init__(self):
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
1616 | setting = RepoRhodeCodeSetting.get(id_) |
|
1618 | setting = RepoRhodeCodeSetting.get(id_) | |
1617 | Session().delete(setting) |
|
1619 | Session().delete(setting) | |
1618 |
|
1620 | |||
1619 | Session().commit() |
|
1621 | Session().commit() | |
1620 |
|
1622 | |||
1621 |
|
1623 | |||
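A hedged sketch of the settings_util fixture in use; the section and value passed below are made-up examples, and repo_stub is assumed to be the usual repository stub fixture from this suite:

    def test_custom_ui_setting(settings_util, repo_stub):
        setting = settings_util.create_repo_rhodecode_ui(
            repo_stub, section='hooks', value='python:example.hook')
        assert setting.ui_active
        # No manual teardown: request.addfinalizer(utility.cleanup) above
        # deletes every row recorded in the *_ids lists after the test.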
1622 | @pytest.fixture() |
|
1624 | @pytest.fixture() | |
1623 | def no_notifications(request): |
|
1625 | def no_notifications(request): | |
1624 | notification_patcher = mock.patch( |
|
1626 | notification_patcher = mock.patch( | |
1625 | 'rhodecode.model.notification.NotificationModel.create') |
|
1627 | 'rhodecode.model.notification.NotificationModel.create') | |
1626 | notification_patcher.start() |
|
1628 | notification_patcher.start() | |
1627 | request.addfinalizer(notification_patcher.stop) |
|
1629 | request.addfinalizer(notification_patcher.stop) | |
1628 |
|
1630 | |||
1629 |
|
1631 | |||
1630 | @pytest.fixture(scope='session') |
|
1632 | @pytest.fixture(scope='session') | |
1631 | def repeat(request): |
|
1633 | def repeat(request): | |
1632 | """ |
|
1634 | """ | |
1633 | The number of repetitions is based on this fixture. |
|
1635 | The number of repetitions is based on this fixture. | |
1634 |
|
1636 | |||
1635 | Slower calls may divide it by 10 or 100. It is chosen so that the |
 |
1637 | Slower calls may divide it by 10 or 100. It is chosen so that the | |
1636 | tests are not too slow in our default test suite. |
|
1638 | tests are not too slow in our default test suite. | |
1637 | """ |
|
1639 | """ | |
1638 | return request.config.getoption('--repeat') |
|
1640 | return request.config.getoption('--repeat') | |
1639 |
|
1641 | |||
1640 |
|
1642 | |||
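A small illustration of the convention described in the docstring above; purely a sketch, assuming --repeat is registered as an int elsewhere in the suite and using a placeholder for the code under test:

    def test_hot_loop(repeat):
        iterations = max(repeat // 100, 1)  # slower test: divide the count
        for _ in range(iterations):
            pass  # call the code under test here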
1641 | @pytest.fixture() |
|
1643 | @pytest.fixture() | |
1642 | def rhodecode_fixtures(): |
|
1644 | def rhodecode_fixtures(): | |
1643 | return Fixture() |
|
1645 | return Fixture() | |
1644 |
|
1646 | |||
1645 |
|
1647 | |||
1646 | @pytest.fixture() |
|
1648 | @pytest.fixture() | |
1647 | def context_stub(): |
|
1649 | def context_stub(): | |
1648 | """ |
|
1650 | """ | |
1649 | Stub context object. |
|
1651 | Stub context object. | |
1650 | """ |
|
1652 | """ | |
1651 | context = pyramid.testing.DummyResource() |
|
1653 | context = pyramid.testing.DummyResource() | |
1652 | return context |
|
1654 | return context | |
1653 |
|
1655 | |||
1654 |
|
1656 | |||
1655 | @pytest.fixture() |
|
1657 | @pytest.fixture() | |
1656 | def request_stub(): |
|
1658 | def request_stub(): | |
1657 | """ |
|
1659 | """ | |
1658 | Stub request object. |
|
1660 | Stub request object. | |
1659 | """ |
|
1661 | """ | |
1660 | from rhodecode.lib.base import bootstrap_request |
|
1662 | from rhodecode.lib.base import bootstrap_request | |
1661 | request = bootstrap_request(scheme='https') |
|
1663 | request = bootstrap_request(scheme='https') | |
1662 | return request |
|
1664 | return request | |
1663 |
|
1665 | |||
1664 |
|
1666 | |||
1665 | @pytest.fixture() |
|
1667 | @pytest.fixture() | |
1666 | def config_stub(request, request_stub): |
|
1668 | def config_stub(request, request_stub): | |
1667 | """ |
|
1669 | """ | |
1668 | Set up pyramid.testing and return the Configurator. |
|
1670 | Set up pyramid.testing and return the Configurator. | |
1669 | """ |
|
1671 | """ | |
1670 | from rhodecode.lib.base import bootstrap_config |
|
1672 | from rhodecode.lib.base import bootstrap_config | |
1671 | config = bootstrap_config(request=request_stub) |
|
1673 | config = bootstrap_config(request=request_stub) | |
1672 |
|
1674 | |||
1673 | @request.addfinalizer |
|
1675 | @request.addfinalizer | |
1674 | def cleanup(): |
|
1676 | def cleanup(): | |
1675 | pyramid.testing.tearDown() |
|
1677 | pyramid.testing.tearDown() | |
1676 |
|
1678 | |||
1677 | return config |
|
1679 | return config | |
1678 |
|
1680 | |||
1679 |
|
1681 | |||
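Taken together, request_stub and config_stub let a view be exercised without a running application. A hedged sketch; the scheme assertion only relies on bootstrap_request(scheme='https') above, anything beyond that would be an assumption:

    def test_view_with_stub_config(config_stub, request_stub):
        # config_stub wires pyramid.testing and tears it down afterwards.
        assert request_stub.scheme == 'https'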
1680 | @pytest.fixture() |
|
1682 | @pytest.fixture() | |
1681 | def StubIntegrationType(): |
|
1683 | def StubIntegrationType(): | |
1682 | class _StubIntegrationType(IntegrationTypeBase): |
|
1684 | class _StubIntegrationType(IntegrationTypeBase): | |
1683 | """ Test integration type class """ |
|
1685 | """ Test integration type class """ | |
1684 |
|
1686 | |||
1685 | key = 'test' |
|
1687 | key = 'test' | |
1686 | display_name = 'Test integration type' |
|
1688 | display_name = 'Test integration type' | |
1687 | description = 'A test integration type for testing' |
|
1689 | description = 'A test integration type for testing' | |
1688 |
|
1690 | |||
1689 | @classmethod |
|
1691 | @classmethod | |
1690 | def icon(cls): |
|
1692 | def icon(cls): | |
1691 | return 'test_icon_html_image' |
|
1693 | return 'test_icon_html_image' | |
1692 |
|
1694 | |||
1693 | def __init__(self, settings): |
|
1695 | def __init__(self, settings): | |
1694 | super(_StubIntegrationType, self).__init__(settings) |
|
1696 | super(_StubIntegrationType, self).__init__(settings) | |
1695 | self.sent_events = [] # for testing |
|
1697 | self.sent_events = [] # for testing | |
1696 |
|
1698 | |||
1697 | def send_event(self, event): |
|
1699 | def send_event(self, event): | |
1698 | self.sent_events.append(event) |
|
1700 | self.sent_events.append(event) | |
1699 |
|
1701 | |||
1700 | def settings_schema(self): |
|
1702 | def settings_schema(self): | |
1701 | class SettingsSchema(colander.Schema): |
|
1703 | class SettingsSchema(colander.Schema): | |
1702 | test_string_field = colander.SchemaNode( |
|
1704 | test_string_field = colander.SchemaNode( | |
1703 | colander.String(), |
|
1705 | colander.String(), | |
1704 | missing=colander.required, |
|
1706 | missing=colander.required, | |
1705 | title='test string field', |
|
1707 | title='test string field', | |
1706 | ) |
|
1708 | ) | |
1707 | test_int_field = colander.SchemaNode( |
|
1709 | test_int_field = colander.SchemaNode( | |
1708 | colander.Int(), |
|
1710 | colander.Int(), | |
1709 | title='some integer setting', |
|
1711 | title='some integer setting', | |
1710 | ) |
|
1712 | ) | |
1711 | return SettingsSchema() |
|
1713 | return SettingsSchema() | |
1712 |
|
1714 | |||
1713 |
|
1715 | |||
1714 | integration_type_registry.register_integration_type(_StubIntegrationType) |
|
1716 | integration_type_registry.register_integration_type(_StubIntegrationType) | |
1715 | return _StubIntegrationType |
|
1717 | return _StubIntegrationType | |
1716 |
|
1718 | |||
1717 | @pytest.fixture() |
|
1719 | @pytest.fixture() | |
1718 | def stub_integration_settings(): |
|
1720 | def stub_integration_settings(): | |
1719 | return { |
|
1721 | return { | |
1720 | 'test_string_field': 'some data', |
|
1722 | 'test_string_field': 'some data', | |
1721 | 'test_int_field': 100, |
|
1723 | 'test_int_field': 100, | |
1722 | } |
|
1724 | } | |
1723 |
|
1725 | |||
1724 |
|
1726 | |||
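The stub type above only records what it is asked to deliver, which keeps event assertions simple. A sketch using nothing beyond the code shown here; the event object is a stand-in:

    def test_stub_integration_records_events(StubIntegrationType,
                                             stub_integration_settings):
        integration = StubIntegrationType(settings=stub_integration_settings)
        fake_event = object()  # a real test would pass a repo-push event
        integration.send_event(fake_event)
        assert integration.sent_events == [fake_event]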
1725 | @pytest.fixture() |
|
1727 | @pytest.fixture() | |
1726 | def repo_integration_stub(request, repo_stub, StubIntegrationType, |
|
1728 | def repo_integration_stub(request, repo_stub, StubIntegrationType, | |
1727 | stub_integration_settings): |
|
1729 | stub_integration_settings): | |
1728 | integration = IntegrationModel().create( |
|
1730 | integration = IntegrationModel().create( | |
1729 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1731 | StubIntegrationType, settings=stub_integration_settings, enabled=True, | |
1730 | name='test repo integration', |
|
1732 | name='test repo integration', | |
1731 | repo=repo_stub, repo_group=None, child_repos_only=None) |
|
1733 | repo=repo_stub, repo_group=None, child_repos_only=None) | |
1732 |
|
1734 | |||
1733 | @request.addfinalizer |
|
1735 | @request.addfinalizer | |
1734 | def cleanup(): |
|
1736 | def cleanup(): | |
1735 | IntegrationModel().delete(integration) |
|
1737 | IntegrationModel().delete(integration) | |
1736 |
|
1738 | |||
1737 | return integration |
|
1739 | return integration | |
1738 |
|
1740 | |||
1739 |
|
1741 | |||
1740 | @pytest.fixture() |
|
1742 | @pytest.fixture() | |
1741 | def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, |
|
1743 | def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, | |
1742 | stub_integration_settings): |
|
1744 | stub_integration_settings): | |
1743 | integration = IntegrationModel().create( |
|
1745 | integration = IntegrationModel().create( | |
1744 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1746 | StubIntegrationType, settings=stub_integration_settings, enabled=True, | |
1745 | name='test repogroup integration', |
|
1747 | name='test repogroup integration', | |
1746 | repo=None, repo_group=test_repo_group, child_repos_only=True) |
|
1748 | repo=None, repo_group=test_repo_group, child_repos_only=True) | |
1747 |
|
1749 | |||
1748 | @request.addfinalizer |
|
1750 | @request.addfinalizer | |
1749 | def cleanup(): |
|
1751 | def cleanup(): | |
1750 | IntegrationModel().delete(integration) |
|
1752 | IntegrationModel().delete(integration) | |
1751 |
|
1753 | |||
1752 | return integration |
|
1754 | return integration | |
1753 |
|
1755 | |||
1754 |
|
1756 | |||
1755 | @pytest.fixture() |
|
1757 | @pytest.fixture() | |
1756 | def repogroup_recursive_integration_stub(request, test_repo_group, |
|
1758 | def repogroup_recursive_integration_stub(request, test_repo_group, | |
1757 | StubIntegrationType, stub_integration_settings): |
|
1759 | StubIntegrationType, stub_integration_settings): | |
1758 | integration = IntegrationModel().create( |
|
1760 | integration = IntegrationModel().create( | |
1759 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1761 | StubIntegrationType, settings=stub_integration_settings, enabled=True, | |
1760 | name='test recursive repogroup integration', |
|
1762 | name='test recursive repogroup integration', | |
1761 | repo=None, repo_group=test_repo_group, child_repos_only=False) |
|
1763 | repo=None, repo_group=test_repo_group, child_repos_only=False) | |
1762 |
|
1764 | |||
1763 | @request.addfinalizer |
|
1765 | @request.addfinalizer | |
1764 | def cleanup(): |
|
1766 | def cleanup(): | |
1765 | IntegrationModel().delete(integration) |
|
1767 | IntegrationModel().delete(integration) | |
1766 |
|
1768 | |||
1767 | return integration |
|
1769 | return integration | |
1768 |
|
1770 | |||
1769 |
|
1771 | |||
1770 | @pytest.fixture() |
|
1772 | @pytest.fixture() | |
1771 | def global_integration_stub(request, StubIntegrationType, |
|
1773 | def global_integration_stub(request, StubIntegrationType, | |
1772 | stub_integration_settings): |
|
1774 | stub_integration_settings): | |
1773 | integration = IntegrationModel().create( |
|
1775 | integration = IntegrationModel().create( | |
1774 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1776 | StubIntegrationType, settings=stub_integration_settings, enabled=True, | |
1775 | name='test global integration', |
|
1777 | name='test global integration', | |
1776 | repo=None, repo_group=None, child_repos_only=None) |
|
1778 | repo=None, repo_group=None, child_repos_only=None) | |
1777 |
|
1779 | |||
1778 | @request.addfinalizer |
|
1780 | @request.addfinalizer | |
1779 | def cleanup(): |
|
1781 | def cleanup(): | |
1780 | IntegrationModel().delete(integration) |
|
1782 | IntegrationModel().delete(integration) | |
1781 |
|
1783 | |||
1782 | return integration |
|
1784 | return integration | |
1783 |
|
1785 | |||
1784 |
|
1786 | |||
1785 | @pytest.fixture() |
|
1787 | @pytest.fixture() | |
1786 | def root_repos_integration_stub(request, StubIntegrationType, |
|
1788 | def root_repos_integration_stub(request, StubIntegrationType, | |
1787 | stub_integration_settings): |
|
1789 | stub_integration_settings): | |
1788 | integration = IntegrationModel().create( |
|
1790 | integration = IntegrationModel().create( | |
1789 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1791 | StubIntegrationType, settings=stub_integration_settings, enabled=True, | |
1790 | name='test global integration', |
|
1792 | name='test global integration', | |
1791 | repo=None, repo_group=None, child_repos_only=True) |
|
1793 | repo=None, repo_group=None, child_repos_only=True) | |
1792 |
|
1794 | |||
1793 | @request.addfinalizer |
|
1795 | @request.addfinalizer | |
1794 | def cleanup(): |
|
1796 | def cleanup(): | |
1795 | IntegrationModel().delete(integration) |
|
1797 | IntegrationModel().delete(integration) | |
1796 |
|
1798 | |||
1797 | return integration |
|
1799 | return integration | |
1798 |
|
1800 | |||
1799 |
|
1801 | |||
1800 | @pytest.fixture() |
|
1802 | @pytest.fixture() | |
1801 | def local_dt_to_utc(): |
|
1803 | def local_dt_to_utc(): | |
1802 | def _factory(dt): |
|
1804 | def _factory(dt): | |
1803 | return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone( |
|
1805 | return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone( | |
1804 | dateutil.tz.tzutc()).replace(tzinfo=None) |
|
1806 | dateutil.tz.tzutc()).replace(tzinfo=None) | |
1805 | return _factory |
|
1807 | return _factory | |
1806 |
|
1808 | |||
1807 |
|
1809 | |||
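A quick sketch of the factory above; the sample datetime is arbitrary:

    import datetime

    def test_local_dt_to_utc(local_dt_to_utc):
        local_dt = datetime.datetime(2020, 1, 1, 12, 0)   # naive local time
        utc_dt = local_dt_to_utc(local_dt)
        assert utc_dt.tzinfo is None   # the factory returns a naive UTC value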
1808 | @pytest.fixture() |
|
1810 | @pytest.fixture() | |
1809 | def disable_anonymous_user(request, baseapp): |
|
1811 | def disable_anonymous_user(request, baseapp): | |
1810 | set_anonymous_access(False) |
|
1812 | set_anonymous_access(False) | |
1811 |
|
1813 | |||
1812 | @request.addfinalizer |
|
1814 | @request.addfinalizer | |
1813 | def cleanup(): |
|
1815 | def cleanup(): | |
1814 | set_anonymous_access(True) |
|
1816 | set_anonymous_access(True) | |
1815 |
|
1817 | |||
1816 |
|
1818 | |||
1817 | @pytest.fixture(scope='module') |
|
1819 | @pytest.fixture(scope='module') | |
1818 | def rc_fixture(request): |
|
1820 | def rc_fixture(request): | |
1819 | return Fixture() |
|
1821 | return Fixture() | |
1820 |
|
1822 | |||
1821 |
|
1823 | |||
1822 | @pytest.fixture() |
|
1824 | @pytest.fixture() | |
1823 | def repo_groups(request): |
|
1825 | def repo_groups(request): | |
1824 | fixture = Fixture() |
|
1826 | fixture = Fixture() | |
1825 |
|
1827 | |||
1826 | session = Session() |
|
1828 | session = Session() | |
1827 | zombie_group = fixture.create_repo_group('zombie') |
|
1829 | zombie_group = fixture.create_repo_group('zombie') | |
1828 | parent_group = fixture.create_repo_group('parent') |
|
1830 | parent_group = fixture.create_repo_group('parent') | |
1829 | child_group = fixture.create_repo_group('parent/child') |
|
1831 | child_group = fixture.create_repo_group('parent/child') | |
1830 | groups_in_db = session.query(RepoGroup).all() |
|
1832 | groups_in_db = session.query(RepoGroup).all() | |
1831 | assert len(groups_in_db) == 3 |
|
1833 | assert len(groups_in_db) == 3 | |
1832 | assert child_group.group_parent_id == parent_group.group_id |
|
1834 | assert child_group.group_parent_id == parent_group.group_id | |
1833 |
|
1835 | |||
1834 | @request.addfinalizer |
|
1836 | @request.addfinalizer | |
1835 | def cleanup(): |
|
1837 | def cleanup(): | |
1836 | fixture.destroy_repo_group(zombie_group) |
|
1838 | fixture.destroy_repo_group(zombie_group) | |
1837 | fixture.destroy_repo_group(child_group) |
|
1839 | fixture.destroy_repo_group(child_group) | |
1838 | fixture.destroy_repo_group(parent_group) |
|
1840 | fixture.destroy_repo_group(parent_group) | |
1839 |
|
1841 | |||
1840 | return zombie_group, parent_group, child_group |
|
1842 | return zombie_group, parent_group, child_group |
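To close out this fixtures module: repo_groups yields the three groups it creates, so a test can unpack them directly. A sketch restating only what the fixture itself asserts:

    def test_group_hierarchy(repo_groups):
        zombie_group, parent_group, child_group = repo_groups
        assert child_group.group_parent_id == parent_group.group_id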
@@ -1,285 +1,285 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import json |
|
21 | import json | |
22 | import platform |
|
22 | import platform | |
23 | import socket |
|
23 | import socket | |
24 | import random |
|
24 | import random | |
25 | import pytest |
|
25 | import pytest | |
26 |
|
26 | |||
27 | from rhodecode.lib.pyramid_utils import get_app_config |
|
27 | from rhodecode.lib.pyramid_utils import get_app_config | |
28 | from rhodecode.tests.fixture import TestINI |
|
28 | from rhodecode.tests.fixture import TestINI | |
29 | from rhodecode.tests.server_utils import RcVCSServer |
|
29 | from rhodecode.tests.server_utils import RcVCSServer | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | def _parse_json(value): |
|
32 | def _parse_json(value): | |
33 | return json.loads(value) if value else None |
|
33 | return json.loads(value) if value else None | |
34 |
|
34 | |||
35 |
|
35 | |||
36 | def pytest_addoption(parser): |
|
36 | def pytest_addoption(parser): | |
37 | parser.addoption( |
|
37 | parser.addoption( | |
38 | '--test-loglevel', dest='test_loglevel', |
|
38 | '--test-loglevel', dest='test_loglevel', | |
39 | help="Set default Logging level for tests, critical(default), error, warn , info, debug") |
|
39 | help="Set default Logging level for tests, critical(default), error, warn , info, debug") | |
40 | group = parser.getgroup('pylons') |
|
40 | group = parser.getgroup('pylons') | |
41 | group.addoption( |
|
41 | group.addoption( | |
42 | '--with-pylons', dest='pyramid_config', |
|
42 | '--with-pylons', dest='pyramid_config', | |
43 | help="Set up a Pylons environment with the specified config file.") |
|
43 | help="Set up a Pylons environment with the specified config file.") | |
44 | group.addoption( |
|
44 | group.addoption( | |
45 | '--ini-config-override', action='store', type=_parse_json, |
|
45 | '--ini-config-override', action='store', type=_parse_json, | |
46 | default=None, dest='pyramid_config_override', help=( |
|
46 | default=None, dest='pyramid_config_override', help=( | |
47 | "Overrides the .ini file settings. Should be specified in JSON" |
|
47 | "Overrides the .ini file settings. Should be specified in JSON" | |
48 | " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'" |
|
48 | " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'" | |
49 | ) |
|
49 | ) | |
50 | ) |
|
50 | ) | |
51 | parser.addini( |
|
51 | parser.addini( | |
52 | 'pyramid_config', |
|
52 | 'pyramid_config', | |
53 | "Set up a Pyramid environment with the specified config file.") |
|
53 | "Set up a Pyramid environment with the specified config file.") | |
54 |
|
54 | |||
55 | vcsgroup = parser.getgroup('vcs') |
|
55 | vcsgroup = parser.getgroup('vcs') | |
56 | vcsgroup.addoption( |
|
56 | vcsgroup.addoption( | |
57 | '--without-vcsserver', dest='with_vcsserver', action='store_false', |
|
57 | '--without-vcsserver', dest='with_vcsserver', action='store_false', | |
58 | help="Do not start the VCSServer in a background process.") |
|
58 | help="Do not start the VCSServer in a background process.") | |
59 | vcsgroup.addoption( |
|
59 | vcsgroup.addoption( | |
60 | '--with-vcsserver-http', dest='vcsserver_config_http', |
|
60 | '--with-vcsserver-http', dest='vcsserver_config_http', | |
61 | help="Start the HTTP VCSServer with the specified config file.") |
|
61 | help="Start the HTTP VCSServer with the specified config file.") | |
62 | vcsgroup.addoption( |
|
62 | vcsgroup.addoption( | |
63 | '--vcsserver-protocol', dest='vcsserver_protocol', |
|
63 | '--vcsserver-protocol', dest='vcsserver_protocol', | |
64 | help="Start the VCSServer with HTTP protocol support.") |
|
64 | help="Start the VCSServer with HTTP protocol support.") | |
65 | vcsgroup.addoption( |
|
65 | vcsgroup.addoption( | |
66 | '--vcsserver-config-override', action='store', type=_parse_json, |
|
66 | '--vcsserver-config-override', action='store', type=_parse_json, | |
67 | default=None, dest='vcsserver_config_override', help=( |
|
67 | default=None, dest='vcsserver_config_override', help=( | |
68 | "Overrides the .ini file settings for the VCSServer. " |
|
68 | "Overrides the .ini file settings for the VCSServer. " | |
69 | "Should be specified in JSON " |
|
69 | "Should be specified in JSON " | |
70 | "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'" |
|
70 | "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'" | |
71 | ) |
|
71 | ) | |
72 | ) |
|
72 | ) | |
73 | vcsgroup.addoption( |
|
73 | vcsgroup.addoption( | |
74 | '--vcsserver-port', action='store', type=int, |
|
74 | '--vcsserver-port', action='store', type=int, | |
75 | default=None, help=( |
|
75 | default=None, help=( | |
76 | "Allows to set the port of the vcsserver. Useful when testing " |
|
76 | "Allows to set the port of the vcsserver. Useful when testing " | |
77 | "against an already running server and random ports cause " |
|
77 | "against an already running server and random ports cause " | |
78 | "trouble.")) |
|
78 | "trouble.")) | |
79 | parser.addini( |
|
79 | parser.addini( | |
80 | 'vcsserver_config_http', |
|
80 | 'vcsserver_config_http', | |
81 | "Start the HTTP VCSServer with the specified config file.") |
|
81 | "Start the HTTP VCSServer with the specified config file.") | |
82 | parser.addini( |
|
82 | parser.addini( | |
83 | 'vcsserver_protocol', |
|
83 | 'vcsserver_protocol', | |
84 | "Start the VCSServer with HTTP protocol support.") |
|
84 | "Start the VCSServer with HTTP protocol support.") | |
85 |
|
85 | |||
86 |
|
86 | |||
87 | @pytest.fixture(scope='session') |
|
87 | @pytest.fixture(scope='session') | |
88 | def vcsserver(request, vcsserver_port, vcsserver_factory): |
|
88 | def vcsserver(request, vcsserver_port, vcsserver_factory): | |
89 | """ |
|
89 | """ | |
90 | Session scope VCSServer. |
|
90 | Session scope VCSServer. | |
91 |
|
91 | |||
92 | Tests which need the VCSServer have to rely on this fixture in order |
 |
92 | Tests which need the VCSServer have to rely on this fixture in order | |
93 | to ensure it will be running. |
|
93 | to ensure it will be running. | |
94 |
|
94 | |||
95 | For specific needs, the fixture vcsserver_factory can be used. It allows |
 |
95 | For specific needs, the fixture vcsserver_factory can be used. It allows | |
96 | adjusting the configuration file for the test run. |
 |
96 | adjusting the configuration file for the test run. | |
97 |
|
97 | |||
98 | Command line args: |
|
98 | Command line args: | |
99 |
|
99 | |||
100 | --without-vcsserver: Allows switching this fixture off. You then have to |
 |
100 | --without-vcsserver: Allows switching this fixture off. You then have to | |
101 | start the server manually. |
 |
101 | start the server manually. | |
102 |
|
102 | |||
103 | --vcsserver-port: Will expect the VCSServer to listen on this port. |
|
103 | --vcsserver-port: Will expect the VCSServer to listen on this port. | |
104 | """ |
|
104 | """ | |
105 |
|
105 | |||
106 | if not request.config.getoption('with_vcsserver'): |
|
106 | if not request.config.getoption('with_vcsserver'): | |
107 | return None |
|
107 | return None | |
108 |
|
108 | |||
109 | return vcsserver_factory( |
|
109 | return vcsserver_factory( | |
110 | request, vcsserver_port=vcsserver_port) |
|
110 | request, vcsserver_port=vcsserver_port) | |
111 |
|
111 | |||
112 |
|
112 | |||
113 | @pytest.fixture(scope='session') |
|
113 | @pytest.fixture(scope='session') | |
114 | def vcsserver_factory(tmpdir_factory): |
|
114 | def vcsserver_factory(tmpdir_factory): | |
115 | """ |
|
115 | """ | |
116 | Use this if you need a running vcsserver with a special configuration. |
|
116 | Use this if you need a running vcsserver with a special configuration. | |
117 | """ |
|
117 | """ | |
118 |
|
118 | |||
119 | def factory(request, overrides=(), vcsserver_port=None, |
|
119 | def factory(request, overrides=(), vcsserver_port=None, | |
120 | log_file=None): |
|
120 | log_file=None): | |
121 |
|
121 | |||
122 | if vcsserver_port is None: |
|
122 | if vcsserver_port is None: | |
123 | vcsserver_port = get_available_port() |
|
123 | vcsserver_port = get_available_port() | |
124 |
|
124 | |||
125 | overrides = list(overrides) |
|
125 | overrides = list(overrides) | |
126 | overrides.append({'server:main': {'port': vcsserver_port}}) |
|
126 | overrides.append({'server:main': {'port': vcsserver_port}}) | |
127 |
|
127 | |||
128 | option_name = 'vcsserver_config_http' |
|
128 | option_name = 'vcsserver_config_http' | |
129 | override_option_name = 'vcsserver_config_override' |
|
129 | override_option_name = 'vcsserver_config_override' | |
130 | config_file = get_config( |
|
130 | config_file = get_config( | |
131 | request.config, option_name=option_name, |
|
131 | request.config, option_name=option_name, | |
132 | override_option_name=override_option_name, overrides=overrides, |
|
132 | override_option_name=override_option_name, overrides=overrides, | |
133 | basetemp=tmpdir_factory.getbasetemp().strpath, |
|
133 | basetemp=tmpdir_factory.getbasetemp().strpath, | |
134 | prefix='test_vcs_') |
|
134 | prefix='test_vcs_') | |
135 |
|
135 | |||
136 | server = RcVCSServer(config_file, log_file) |
|
136 | server = RcVCSServer(config_file, log_file) | |
137 | server.start() |
|
137 | server.start() | |
138 |
|
138 | |||
139 | @request.addfinalizer |
|
139 | @request.addfinalizer | |
140 | def cleanup(): |
|
140 | def cleanup(): | |
141 | server.shutdown() |
|
141 | server.shutdown() | |
142 |
|
142 | |||
143 | server.wait_until_ready() |
|
143 | server.wait_until_ready() | |
144 | return server |
|
144 | return server | |
145 |
|
145 | |||
146 | return factory |
|
146 | return factory | |
147 |
|
147 | |||
148 |
|
148 | |||
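A hedged sketch of the factory above being used for a one-off server with an explicit port and an extra INI override; the override section and key are illustrative only:

    @pytest.fixture(scope='session')
    def custom_vcsserver(request, vcsserver_factory):
        return vcsserver_factory(
            request,
            overrides=[{'app:main': {'example.setting': 'value'}}],
            vcsserver_port=40999)  # fixed port instead of a random free one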
149 | def is_cygwin(): |
|
149 | def is_cygwin(): | |
150 | return 'cygwin' in platform.system().lower() |
|
150 | return 'cygwin' in platform.system().lower() | |
151 |
|
151 | |||
152 |
|
152 | |||
153 | def _use_log_level(config): |
|
153 | def _use_log_level(config): | |
154 | level = config.getoption('test_loglevel') or 'critical' |
|
154 | level = config.getoption('test_loglevel') or 'critical' | |
155 | return level.upper() |
|
155 | return level.upper() | |
156 |
|
156 | |||
157 |
|
157 | |||
158 | @pytest.fixture(scope='session') |
|
158 | @pytest.fixture(scope='session') | |
159 | def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port): |
|
159 | def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port): | |
160 | option_name = 'pyramid_config' |
|
160 | option_name = 'pyramid_config' | |
161 | log_level = _use_log_level(request.config) |
|
161 | log_level = _use_log_level(request.config) | |
162 |
|
162 | |||
163 | overrides = [ |
|
163 | overrides = [ | |
164 | {'server:main': {'port': rcserver_port}}, |
|
164 | {'server:main': {'port': rcserver_port}}, | |
165 | {'app:main': { |
|
165 | {'app:main': { | |
166 | 'vcs.server': 'localhost:%s' % vcsserver_port, |
|
166 | 'vcs.server': 'localhost:%s' % vcsserver_port, | |
167 | # johbo: We will always start the VCSServer on our own based on the |
|
167 | # johbo: We will always start the VCSServer on our own based on the | |
168 | # fixtures of the test cases. For the test run it must always be |
|
168 | # fixtures of the test cases. For the test run it must always be | |
169 | # off in the INI file. |
|
169 | # off in the INI file. | |
170 | 'vcs.start_server': 'false', |
|
170 | 'vcs.start_server': 'false', | |
171 |
|
171 | |||
172 | 'vcs.server.protocol': 'http', |
|
172 | 'vcs.server.protocol': 'http', | |
173 | 'vcs.scm_app_implementation': 'http', |
|
173 | 'vcs.scm_app_implementation': 'http', | |
174 | 'vcs.hooks.protocol': 'http', |
|
174 | 'vcs.hooks.protocol': 'http', | |
175 | 'vcs.hooks.host': '127.0.0.1', |
|
175 | 'vcs.hooks.host': '127.0.0.1', | |
176 | }}, |
|
176 | }}, | |
177 |
|
177 | |||
178 | {'handler_console': { |
|
178 | {'handler_console': { | |
179 | 'class ': 'StreamHandler', |
|
179 | 'class ': 'StreamHandler', | |
180 | 'args ': '(sys.stderr,)', |
|
180 | 'args ': '(sys.stderr,)', | |
181 | 'level': log_level, |
|
181 | 'level': log_level, | |
182 | }}, |
|
182 | }}, | |
183 |
|
183 | |||
184 | ] |
|
184 | ] | |
185 |
|
185 | |||
186 | filename = get_config( |
|
186 | filename = get_config( | |
187 | request.config, option_name=option_name, |
|
187 | request.config, option_name=option_name, | |
188 | override_option_name='{}_override'.format(option_name), |
|
188 | override_option_name='{}_override'.format(option_name), | |
189 | overrides=overrides, |
|
189 | overrides=overrides, | |
190 | basetemp=tmpdir_factory.getbasetemp().strpath, |
|
190 | basetemp=tmpdir_factory.getbasetemp().strpath, | |
191 | prefix='test_rce_') |
|
191 | prefix='test_rce_') | |
192 | return filename |
|
192 | return filename | |
193 |
|
193 | |||
194 |
|
194 | |||
195 | @pytest.fixture(scope='session') |
|
195 | @pytest.fixture(scope='session') | |
196 | def ini_settings(ini_config): |
|
196 | def ini_settings(ini_config): | |
197 | ini_path = ini_config |
|
197 | ini_path = ini_config | |
198 | return get_app_config(ini_path) |
|
198 | return get_app_config(ini_path) | |
199 |
|
199 | |||
200 |
|
200 | |||
201 | def get_available_port(min_port=40000, max_port=55555): |
|
201 | def get_available_port(min_port=40000, max_port=55555): | |
202 | from rhodecode.lib.utils2 import get_available_port as _get_port |
|
202 | from rhodecode.lib.utils2 import get_available_port as _get_port | |
203 | return _get_port(min_port, max_port) |
|
203 | return _get_port(min_port, max_port) | |
204 |
|
204 | |||
205 |
|
205 | |||
206 | @pytest.fixture(scope='session') |
|
206 | @pytest.fixture(scope='session') | |
207 | def rcserver_port(request): |
|
207 | def rcserver_port(request): | |
208 | port = get_available_port() |
|
208 | port = get_available_port() | |
209 | print('Using r |
 |
209 | print('Using rhodecode port {}'.format(port)) | |
210 | return port |
|
210 | return port | |
211 |
|
211 | |||
212 |
|
212 | |||
213 | @pytest.fixture(scope='session') |
|
213 | @pytest.fixture(scope='session') | |
214 | def vcsserver_port(request): |
|
214 | def vcsserver_port(request): | |
215 | port = request.config.getoption('--vcsserver-port') |
|
215 | port = request.config.getoption('--vcsserver-port') | |
216 | if port is None: |
|
216 | if port is None: | |
217 | port = get_available_port() |
|
217 | port = get_available_port() | |
218 | print('Using vcsserver port {}'.format(port)) |
|
218 | print('Using vcsserver port {}'.format(port)) | |
219 | return port |
|
219 | return port | |
220 |
|
220 | |||
221 |
|
221 | |||
222 | @pytest.fixture(scope='session') |
|
222 | @pytest.fixture(scope='session') | |
223 | def available_port_factory(): |
|
223 | def available_port_factory(): | |
224 | """ |
|
224 | """ | |
225 | Returns a callable which returns free port numbers. |
|
225 | Returns a callable which returns free port numbers. | |
226 | """ |
|
226 | """ | |
227 | return get_available_port |
|
227 | return get_available_port | |
228 |
|
228 | |||
229 |
|
229 | |||
230 | @pytest.fixture() |
|
230 | @pytest.fixture() | |
231 | def available_port(available_port_factory): |
|
231 | def available_port(available_port_factory): | |
232 | """ |
|
232 | """ | |
233 | Gives you one free port for the current test. |
|
233 | Gives you one free port for the current test. | |
234 |
|
234 | |||
235 | Uses "available_port_factory" to retrieve the port. |
|
235 | Uses "available_port_factory" to retrieve the port. | |
236 | """ |
|
236 | """ | |
237 | return available_port_factory() |
|
237 | return available_port_factory() | |
238 |
|
238 | |||
239 |
|
239 | |||
240 | @pytest.fixture(scope='session') |
|
240 | @pytest.fixture(scope='session') | |
241 | def testini_factory(tmpdir_factory, ini_config): |
|
241 | def testini_factory(tmpdir_factory, ini_config): | |
242 | """ |
|
242 | """ | |
243 | Factory to create an INI file based on TestINI. |
|
243 | Factory to create an INI file based on TestINI. | |
244 |
|
244 | |||
245 | It will make sure to place the INI file in the correct directory. |
|
245 | It will make sure to place the INI file in the correct directory. | |
246 | """ |
|
246 | """ | |
247 | basetemp = tmpdir_factory.getbasetemp().strpath |
|
247 | basetemp = tmpdir_factory.getbasetemp().strpath | |
248 | return TestIniFactory(basetemp, ini_config) |
|
248 | return TestIniFactory(basetemp, ini_config) | |
249 |
|
249 | |||
250 |
|
250 | |||
251 | class TestIniFactory(object): |
|
251 | class TestIniFactory(object): | |
252 |
|
252 | |||
253 | def __init__(self, basetemp, template_ini): |
|
253 | def __init__(self, basetemp, template_ini): | |
254 | self._basetemp = basetemp |
|
254 | self._basetemp = basetemp | |
255 | self._template_ini = template_ini |
|
255 | self._template_ini = template_ini | |
256 |
|
256 | |||
257 | def __call__(self, ini_params, new_file_prefix='test'): |
|
257 | def __call__(self, ini_params, new_file_prefix='test'): | |
258 | ini_file = TestINI( |
|
258 | ini_file = TestINI( | |
259 | self._template_ini, ini_params=ini_params, |
|
259 | self._template_ini, ini_params=ini_params, | |
260 | new_file_prefix=new_file_prefix, dir=self._basetemp) |
|
260 | new_file_prefix=new_file_prefix, dir=self._basetemp) | |
261 | result = ini_file.create() |
|
261 | result = ini_file.create() | |
262 | return result |
|
262 | return result | |
263 |
|
263 | |||
264 |
|
264 | |||
265 | def get_config( |
|
265 | def get_config( | |
266 | config, option_name, override_option_name, overrides=None, |
|
266 | config, option_name, override_option_name, overrides=None, | |
267 | basetemp=None, prefix='test'): |
|
267 | basetemp=None, prefix='test'): | |
268 | """ |
|
268 | """ | |
269 | Find a configuration file and apply overrides for the given `prefix`. |
|
269 | Find a configuration file and apply overrides for the given `prefix`. | |
270 | """ |
|
270 | """ | |
271 | config_file = ( |
|
271 | config_file = ( | |
272 | config.getoption(option_name) or config.getini(option_name)) |
|
272 | config.getoption(option_name) or config.getini(option_name)) | |
273 | if not config_file: |
|
273 | if not config_file: | |
274 | pytest.exit( |
|
274 | pytest.exit( | |
275 | "Configuration error, could not extract {}.".format(option_name)) |
|
275 | "Configuration error, could not extract {}.".format(option_name)) | |
276 |
|
276 | |||
277 | overrides = overrides or [] |
|
277 | overrides = overrides or [] | |
278 | config_override = config.getoption(override_option_name) |
|
278 | config_override = config.getoption(override_option_name) | |
279 | if config_override: |
|
279 | if config_override: | |
280 | overrides.append(config_override) |
|
280 | overrides.append(config_override) | |
281 | temp_ini_file = TestINI( |
|
281 | temp_ini_file = TestINI( | |
282 | config_file, ini_params=overrides, new_file_prefix=prefix, |
|
282 | config_file, ini_params=overrides, new_file_prefix=prefix, | |
283 | dir=basetemp) |
|
283 | dir=basetemp) | |
284 |
|
284 | |||
285 | return temp_ini_file.create() |
|
285 | return temp_ini_file.create() |
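For reference, this is roughly what the JSON override options defined above expect; the section and parameter names are placeholders:

    # --ini-config-override='{"app:main": {"some.parameter": "value"}}'
    override = _parse_json('{"app:main": {"some.parameter": "value"}}')
    assert override == {'app:main': {'some.parameter': 'value'}}
    # get_config() appends such a dict to `overrides` before TestINI
    # writes the temporary INI file used for the test run.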
@@ -1,197 +1,200 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import os |
|
22 | import os | |
23 | import time |
|
23 | import time | |
24 | import tempfile |
|
24 | import tempfile | |
25 | import pytest |
|
25 | import pytest | |
26 | import subprocess32 |
|
26 | import subprocess32 | |
27 | import configobj |
|
27 | import configobj | |
|
28 | import logging | |||
28 |
|
29 | |||
29 | from urllib2 import urlopen, URLError |
|
30 | from urllib2 import urlopen, URLError | |
30 | from pyramid.compat import configparser |
|
31 | from pyramid.compat import configparser | |
31 |
|
32 | |||
32 |
|
33 | |||
33 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS |
|
34 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS | |
34 | from rhodecode.tests.utils import is_url_reachable |
|
35 | from rhodecode.tests.utils import is_url_reachable | |
35 |
|
36 | |||
|
37 | log = logging.getLogger(__name__) | |||
|
38 | ||||
36 |
|
39 | |||
37 | def get_port(pyramid_config): |
|
40 | def get_port(pyramid_config): | |
38 | config = configparser.ConfigParser() |
|
41 | config = configparser.ConfigParser() | |
39 | config.read(pyramid_config) |
|
42 | config.read(pyramid_config) | |
40 | return config.get('server:main', 'port') |
|
43 | return config.get('server:main', 'port') | |
41 |
|
44 | |||
42 |
|
45 | |||
43 | def get_host_url(pyramid_config): |
|
46 | def get_host_url(pyramid_config): | |
44 | """Construct the host url using the port in the test configuration.""" |
|
47 | """Construct the host url using the port in the test configuration.""" | |
45 | return '127.0.0.1:%s' % get_port(pyramid_config) |
|
48 | return '127.0.0.1:%s' % get_port(pyramid_config) | |
46 |
|
49 | |||
47 |
|
50 | |||
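Assuming a test INI whose [server:main] section sets port = 40123 (a made-up value and filename), the two helpers above resolve as follows:

    assert get_port('test_rce_example.ini') == '40123'             # a string
    assert get_host_url('test_rce_example.ini') == '127.0.0.1:40123'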
48 | def assert_no_running_instance(url): |
|
51 | def assert_no_running_instance(url): | |
49 | if is_url_reachable(url): |
|
52 | if is_url_reachable(url): | |
50 | print("Hint: Usually this means another instance of server " |
|
53 | print("Hint: Usually this means another instance of server " | |
51 | "is running in the background at %s." % url) |
|
54 | "is running in the background at %s." % url) | |
52 | pytest.fail( |
|
55 | pytest.fail( | |
53 | "Port is not free at %s, cannot start server at" % url) |
|
56 | "Port is not free at %s, cannot start server at" % url) | |
54 |
|
57 | |||
55 |
|
58 | |||
56 | class ServerBase(object): |
|
59 | class ServerBase(object): | |
57 | _args = [] |
|
60 | _args = [] | |
58 | log_file_name = 'NOT_DEFINED.log' |
|
61 | log_file_name = 'NOT_DEFINED.log' | |
59 | status_url_tmpl = 'http://{host}:{port}' |
|
62 | status_url_tmpl = 'http://{host}:{port}' | |
60 |
|
63 | |||
61 | def __init__(self, config_file, log_file): |
|
64 | def __init__(self, config_file, log_file): | |
62 | self.config_file = config_file |
|
65 | self.config_file = config_file | |
63 | config_data = configobj.ConfigObj(config_file) |
|
66 | config_data = configobj.ConfigObj(config_file) | |
64 | self._config = config_data['server:main'] |
|
67 | self._config = config_data['server:main'] | |
65 |
|
68 | |||
66 | self._args = [] |
|
69 | self._args = [] | |
67 | self.log_file = log_file or os.path.join( |
|
70 | self.log_file = log_file or os.path.join( | |
68 | tempfile.gettempdir(), self.log_file_name) |
|
71 | tempfile.gettempdir(), self.log_file_name) | |
69 | self.process = None |
|
72 | self.process = None | |
70 | self.server_out = None |
|
73 | self.server_out = None | |
71 |
|
|
74 | log.info("Using the {} configuration:{}".format( | |
72 | self.__class__.__name__, config_file)) |
|
75 | self.__class__.__name__, config_file)) | |
73 |
|
76 | |||
74 | if not os.path.isfile(config_file): |
|
77 | if not os.path.isfile(config_file): | |
75 | raise RuntimeError('Failed to get config at {}'.format(config_file)) |
|
78 | raise RuntimeError('Failed to get config at {}'.format(config_file)) | |
76 |
|
79 | |||
77 | @property |
|
80 | @property | |
78 | def command(self): |
|
81 | def command(self): | |
79 | return ' '.join(self._args) |
|
82 | return ' '.join(self._args) | |
80 |
|
83 | |||
81 | @property |
|
84 | @property | |
82 | def http_url(self): |
|
85 | def http_url(self): | |
83 | template = 'http://{host}:{port}/' |
|
86 | template = 'http://{host}:{port}/' | |
84 | return template.format(**self._config) |
|
87 | return template.format(**self._config) | |
85 |
|
88 | |||
86 | def host_url(self): |
|
89 | def host_url(self): | |
87 | return 'http://' + get_host_url(self.config_file) |
|
90 | return 'http://' + get_host_url(self.config_file) | |
88 |
|
91 | |||
89 | def get_rc_log(self): |
|
92 | def get_rc_log(self): | |
90 | with open(self.log_file) as f: |
|
93 | with open(self.log_file) as f: | |
91 | return f.read() |
|
94 | return f.read() | |
92 |
|
95 | |||
93 | def wait_until_ready(self, timeout=30): |
|
96 | def wait_until_ready(self, timeout=30): | |
94 | host = self._config['host'] |
|
97 | host = self._config['host'] | |
95 | port = self._config['port'] |
|
98 | port = self._config['port'] | |
96 | status_url = self.status_url_tmpl.format(host=host, port=port) |
|
99 | status_url = self.status_url_tmpl.format(host=host, port=port) | |
97 | start = time.time() |
|
100 | start = time.time() | |
98 |
|
101 | |||
99 | while time.time() - start < timeout: |
|
102 | while time.time() - start < timeout: | |
100 | try: |
|
103 | try: | |
101 | urlopen(status_url) |
|
104 | urlopen(status_url) | |
102 | break |
|
105 | break | |
103 | except URLError: |
|
106 | except URLError: | |
104 | time.sleep(0.2) |
|
107 | time.sleep(0.2) | |
105 | else: |
|
108 | else: | |
106 | pytest.fail( |
|
109 | pytest.fail( | |
107 | "Starting the {} failed or took more than {} " |
|
110 | "Starting the {} failed or took more than {} " | |
108 | "seconds. cmd: `{}`".format( |
|
111 | "seconds. cmd: `{}`".format( | |
109 | self.__class__.__name__, timeout, self.command)) |
|
112 | self.__class__.__name__, timeout, self.command)) | |
110 |
|
113 | |||
111 |
|
|
114 | log.info('Server of {} ready at url {}'.format( | |
112 | self.__class__.__name__, status_url)) |
|
115 | self.__class__.__name__, status_url)) | |
113 |
|
116 | |||
114 | def shutdown(self): |
|
117 | def shutdown(self): | |
115 | self.process.kill() |
|
118 | self.process.kill() | |
116 | self.server_out.flush() |
|
119 | self.server_out.flush() | |
117 | self.server_out.close() |
|
120 | self.server_out.close() | |
118 |
|
121 | |||
119 | def get_log_file_with_port(self): |
|
122 | def get_log_file_with_port(self): | |
120 | log_file = list(self.log_file.partition('.log')) |
|
123 | log_file = list(self.log_file.partition('.log')) | |
121 | log_file.insert(1, get_port(self.config_file)) |
|
124 | log_file.insert(1, get_port(self.config_file)) | |
122 | log_file = ''.join(log_file) |
|
125 | log_file = ''.join(log_file) | |
123 | return log_file |
|
126 | return log_file | |
124 |
|
127 | |||
125 |
|
128 | |||
126 | class RcVCSServer(ServerBase): |
|
129 | class RcVCSServer(ServerBase): | |
127 | """ |
|
130 | """ | |
128 | Represents a running VCSServer instance. |
|
131 | Represents a running VCSServer instance. | |
129 | """ |
|
132 | """ | |
130 |
|
133 | |||
131 | log_file_name = 'rc-vcsserver.log' |
|
134 | log_file_name = 'rc-vcsserver.log' | |
132 | status_url_tmpl = 'http://{host}:{port}/status' |
|
135 | status_url_tmpl = 'http://{host}:{port}/status' | |
133 |
|
136 | |||
134 | def __init__(self, config_file, log_file=None): |
|
137 | def __init__(self, config_file, log_file=None): | |
135 | super(RcVCSServer, self).__init__(config_file, log_file) |
|
138 | super(RcVCSServer, self).__init__(config_file, log_file) | |
136 | self._args = ['gunicorn', '--paste', self.config_file] |
|
139 | self._args = ['gunicorn', '--paste', self.config_file] | |
137 |
|
140 | |||
138 | def start(self): |
|
141 | def start(self): | |
139 | env = os.environ.copy() |
|
142 | env = os.environ.copy() | |
140 |
|
143 | |||
141 | self.log_file = self.get_log_file_with_port() |
|
144 | self.log_file = self.get_log_file_with_port() | |
142 | self.server_out = open(self.log_file, 'w') |
|
145 | self.server_out = open(self.log_file, 'w') | |
143 |
|
146 | |||
144 | host_url = self.host_url() |
|
147 | host_url = self.host_url() | |
145 | assert_no_running_instance(host_url) |
|
148 | assert_no_running_instance(host_url) | |
146 |
|
149 | |||
147 |
|
|
150 | log.info('rhodecode-vcsserver start command: {}'.format(' '.join(self._args))) | |
148 |
|
|
151 | log.info('rhodecode-vcsserver starting at: {}'.format(host_url)) | |
149 |
|
|
152 | log.info('rhodecode-vcsserver command: {}'.format(self.command)) | |
150 |
|
|
153 | log.info('rhodecode-vcsserver logfile: {}'.format(self.log_file)) | |
151 |
|
154 | |||
152 | self.process = subprocess32.Popen( |
|
155 | self.process = subprocess32.Popen( | |
153 | self._args, bufsize=0, env=env, |
|
156 | self._args, bufsize=0, env=env, | |
154 | stdout=self.server_out, stderr=self.server_out) |
|
157 | stdout=self.server_out, stderr=self.server_out) | |
155 |
|
158 | |||
156 |
|
159 | |||
157 | class RcWebServer(ServerBase): |
|
160 | class RcWebServer(ServerBase): | |
158 | """ |
|
161 | """ | |
159 | Represents a running RCE web server used as a test fixture. |
|
162 | Represents a running RCE web server used as a test fixture. | |
160 | """ |
|
163 | """ | |
161 |
|
164 | |||
162 | log_file_name = 'rc-web.log' |
|
165 | log_file_name = 'rc-web.log' | |
163 | status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' |
|
166 | status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' | |
164 |
|
167 | |||
165 | def __init__(self, config_file, log_file=None): |
|
168 | def __init__(self, config_file, log_file=None): | |
166 | super(RcWebServer, self).__init__(config_file, log_file) |
|
169 | super(RcWebServer, self).__init__(config_file, log_file) | |
167 | self._args = [ |
|
170 | self._args = [ | |
168 | 'gunicorn', '--worker-class', 'gevent', '--paste', config_file] |
|
171 | 'gunicorn', '--worker-class', 'gevent', '--paste', config_file] | |
169 |
|
172 | |||
170 | def start(self): |
|
173 | def start(self): | |
171 | env = os.environ.copy() |
|
174 | env = os.environ.copy() | |
172 | env['RC_NO_TMP_PATH'] = '1' |
|
175 | env['RC_NO_TMP_PATH'] = '1' | |
173 |
|
176 | |||
174 | self.log_file = self.get_log_file_with_port() |
|
177 | self.log_file = self.get_log_file_with_port() | |
175 | self.server_out = open(self.log_file, 'w') |
|
178 | self.server_out = open(self.log_file, 'w') | |
176 |
|
179 | |||
177 | host_url = self.host_url() |
|
180 | host_url = self.host_url() | |
178 | assert_no_running_instance(host_url) |
|
181 | assert_no_running_instance(host_url) | |
179 |
|
182 | |||
180 |
|
|
183 | log.info('rhodecode-web starting at: {}'.format(host_url)) | |
181 |
|
|
184 | log.info('rhodecode-web command: {}'.format(self.command)) | |
182 |
|
|
185 | log.info('rhodecode-web logfile: {}'.format(self.log_file)) | |
183 |
|
186 | |||
184 | self.process = subprocess32.Popen( |
|
187 | self.process = subprocess32.Popen( | |
185 | self._args, bufsize=0, env=env, |
|
188 | self._args, bufsize=0, env=env, | |
186 | stdout=self.server_out, stderr=self.server_out) |
|
189 | stdout=self.server_out, stderr=self.server_out) | |
187 |
|
190 | |||
188 | def repo_clone_url(self, repo_name, **kwargs): |
|
191 | def repo_clone_url(self, repo_name, **kwargs): | |
189 | params = { |
|
192 | params = { | |
190 | 'user': TEST_USER_ADMIN_LOGIN, |
|
193 | 'user': TEST_USER_ADMIN_LOGIN, | |
191 | 'passwd': TEST_USER_ADMIN_PASS, |
|
194 | 'passwd': TEST_USER_ADMIN_PASS, | |
192 | 'host': get_host_url(self.config_file), |
|
195 | 'host': get_host_url(self.config_file), | |
193 | 'cloned_repo': repo_name, |
|
196 | 'cloned_repo': repo_name, | |
194 | } |
|
197 | } | |
195 | params.update(**kwargs) |
|
198 | params.update(**kwargs) | |
196 | _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params |
|
199 | _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params | |
197 | return _url |
|
200 | return _url |
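Since repo_clone_url() merges any keyword arguments into the URL parameters, a test can override the credentials or host; a short sketch (the user name and password below are made up, rc_web_server stands for the fixture providing a started RcWebServer):

    clone_url = rc_web_server.repo_clone_url(GIT_REPO)
    # -> http://<admin-user>:<admin-pass>@127.0.0.1:<port>/<GIT_REPO>

    custom_url = rc_web_server.repo_clone_url(
        GIT_REPO, user='someuser', passwd='secret')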
@@ -1,193 +1,193 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Base for test suite for making push/pull operations. |
|
22 | Base for test suite for making push/pull operations. | |
23 |
|
23 | |||
24 | .. important:: |
|
24 | .. important:: | |
25 |
|
25 | |||
26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started |
|
26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started | |
27 | to redirect things to stderr instead of stdout. |
|
27 | to redirect things to stderr instead of stdout. | |
28 | """ |
|
28 | """ | |
29 |
|
29 | |||
30 | from os.path import join as jn |
|
30 | from os.path import join as jn | |
31 | from subprocess32 import Popen, PIPE |
|
31 | from subprocess32 import Popen, PIPE | |
32 | import logging |
|
32 | import logging | |
33 | import os |
|
33 | import os | |
34 | import tempfile |
|
34 | import tempfile | |
35 |
|
35 | |||
36 | from rhodecode.tests import GIT_REPO, HG_REPO |
|
36 | from rhodecode.tests import GIT_REPO, HG_REPO | |
37 |
|
37 | |||
38 | DEBUG = True |
|
38 | DEBUG = True | |
39 | RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log') |
|
39 | RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log') | |
40 | REPO_GROUP = 'a_repo_group' |
|
40 | REPO_GROUP = 'a_repo_group' | |
41 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) |
|
41 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) | |
42 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) |
|
42 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) | |
43 |
|
43 | |||
44 | log = logging.getLogger(__name__) |
|
44 | log = logging.getLogger(__name__) | |
45 |
|
45 | |||
46 |
|
46 | |||
47 | class Command(object): |
|
47 | class Command(object): | |
48 |
|
48 | |||
49 | def __init__(self, cwd): |
|
49 | def __init__(self, cwd): | |
50 | self.cwd = cwd |
|
50 | self.cwd = cwd | |
51 | self.process = None |
|
51 | self.process = None | |
52 |
|
52 | |||
53 | def execute(self, cmd, *args): |
|
53 | def execute(self, cmd, *args): | |
54 | """ |
|
54 | """ | |
55 | Runs command on the system with given ``args``. |
|
55 | Runs command on the system with given ``args``. | |
56 | """ |
|
56 | """ | |
57 |
|
57 | |||
58 | command = cmd + ' ' + ' '.join(args) |
|
58 | command = cmd + ' ' + ' '.join(args) | |
59 | if DEBUG: |
|
59 | if DEBUG: | |
60 | log.debug('*** CMD %s ***', command) |
|
60 | log.debug('*** CMD %s ***', command) | |
61 |
|
61 | |||
62 | env = dict(os.environ) |
|
62 | env = dict(os.environ) | |
63 | # Delete coverage variables, as they make the test fail for Mercurial |
|
63 | # Delete coverage variables, as they make the test fail for Mercurial | |
64 | for key in env.keys(): |
|
64 | for key in env.keys(): | |
65 | if key.startswith('COV_CORE_'): |
|
65 | if key.startswith('COV_CORE_'): | |
66 | del env[key] |
|
66 | del env[key] | |
67 |
|
67 | |||
68 | self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, |
|
68 | self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, | |
69 | cwd=self.cwd, env=env) |
|
69 | cwd=self.cwd, env=env) | |
70 | stdout, stderr = self.process.communicate() |
|
70 | stdout, stderr = self.process.communicate() | |
71 | if DEBUG: |
|
71 | if DEBUG: | |
72 | log.debug('STDOUT:%s', stdout) |
|
72 | log.debug('STDOUT:%s', stdout) | |
73 | log.debug('STDERR:%s', stderr) |
|
73 | log.debug('STDERR:%s', stderr) | |
74 | return stdout, stderr |
|
74 | return stdout, stderr | |
75 |
|
75 | |||
76 | def assert_returncode_success(self): |
|
76 | def assert_returncode_success(self): | |
77 | assert self.process.returncode == 0 |
|
77 | assert self.process.returncode == 0 | |
78 |
|
78 | |||
79 |
|
79 | |||
80 | def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, |
|
80 | def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, new_branch=False, **kwargs): | |
81 | new_branch=False, **kwargs): |
|
|||
82 | git_ident = "git config user.name {} && git config user.email {}".format( |
|
81 | git_ident = "git config user.name {} && git config user.email {}".format( | |
83 | 'Marcin KuΕΊminski', 'me@email.com') |
|
82 | 'Marcin KuΕΊminski', 'me@email.com') | |
84 | cwd = path = jn(dest) |
|
83 | cwd = path = jn(dest) | |
85 |
|
84 | |||
86 | tags = tags or [] |
|
85 | tags = tags or [] | |
87 | added_file = jn(path, '%ssetup.py' % tempfile._RandomNameSequence().next()) |
|
86 | added_file = jn(path, '%s_setup.py' % tempfile._RandomNameSequence().next()) | |
88 | Command(cwd).execute('touch %s' % added_file) |
|
87 | Command(cwd).execute('touch %s' % added_file) | |
89 | Command(cwd).execute('%s add %s' % (vcs, added_file)) |
|
88 | Command(cwd).execute('%s add %s' % (vcs, added_file)) | |
90 | author_str = 'Marcin KuΕΊminski <me@email.com>' |
|
89 | author_str = 'Marcin KuΕΊminski <me@email.com>' | |
91 |
|
90 | |||
92 | for i in range(kwargs.get('files_no', 3)): |
|
91 | for i in range(kwargs.get('files_no', 3)): | |
93 | cmd = """echo 'added_line%s' >> %s""" % (i, added_file) |
|
92 | cmd = """echo 'added_line%s' >> %s""" % (i, added_file) | |
94 | Command(cwd).execute(cmd) |
|
93 | Command(cwd).execute(cmd) | |
|
94 | ||||
95 | if vcs == 'hg': |
|
95 | if vcs == 'hg': | |
96 | cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % ( |
|
96 | cmd = """hg commit -m 'committed new %s' -u '%s' %s """ % ( | |
97 | i, author_str, added_file |
|
97 | i, author_str, added_file | |
98 | ) |
|
98 | ) | |
99 | elif vcs == 'git': |
|
99 | elif vcs == 'git': | |
100 | cmd = """%s && git commit -m 'commited new %s' %s""" % ( |
|
100 | cmd = """%s && git commit -m 'committed new %s' %s""" % ( | |
101 | git_ident, i, added_file) |
|
101 | git_ident, i, added_file) | |
102 | Command(cwd).execute(cmd) |
|
102 | Command(cwd).execute(cmd) | |
103 |
|
103 | |||
104 | for tag in tags: |
|
104 | for tag in tags: | |
105 | if vcs == 'hg': |
|
105 | if vcs == 'hg': | |
106 | Command(cwd).execute( |
|
106 | Command(cwd).execute( | |
107 | 'hg tag', tag['name']) |
|
107 | 'hg tag -m "{}" -u "{}" '.format(tag['commit'], author_str), tag['name']) | |
108 | elif vcs == 'git': |
|
108 | elif vcs == 'git': | |
109 | if tag['commit']: |
|
109 | if tag['commit']: | |
110 | # annotated tag |
|
110 | # annotated tag | |
111 | _stdout, _stderr = Command(cwd).execute( |
|
111 | _stdout, _stderr = Command(cwd).execute( | |
112 | """%s && git tag -a %s -m "%s" """ % ( |
|
112 | """%s && git tag -a %s -m "%s" """ % ( | |
113 | git_ident, tag['name'], tag['commit'])) |
|
113 | git_ident, tag['name'], tag['commit'])) | |
114 | else: |
|
114 | else: | |
115 | # lightweight tag |
|
115 | # lightweight tag | |
116 | _stdout, _stderr = Command(cwd).execute( |
|
116 | _stdout, _stderr = Command(cwd).execute( | |
117 | """%s && git tag %s""" % ( |
|
117 | """%s && git tag %s""" % ( | |
118 | git_ident, tag['name'])) |
|
118 | git_ident, tag['name'])) | |
119 |
|
119 | |||
120 |
|
120 | |||
121 | def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None, |
|
121 | def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None, | |
122 | new_branch=False, **kwargs): |
|
122 | new_branch=False, **kwargs): | |
123 | """ |
|
123 | """ | |
124 | Generate some files, add them to the DEST repo and push back

124 | Generate some files, add them to the DEST repo and push back | 
125 | vcs is 'git' or 'hg' and defines which VCS the files are created for

125 | vcs is 'git' or 'hg' and defines which VCS the files are created for | 
126 | """ |
|
126 | """ | |
127 | git_ident = "git config user.name {} && git config user.email {}".format( |
|
127 | git_ident = "git config user.name {} && git config user.email {}".format( | |
128 | 'Marcin KuΕΊminski', 'me@email.com') |
|
128 | 'Marcin KuΕΊminski', 'me@email.com') | |
129 | cwd = path = jn(dest) |
|
129 | cwd = path = jn(dest) | |
130 |
|
130 | |||
131 | # commit some stuff into this repo |
|
131 | # commit some stuff into this repo | |
132 | _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs) |
|
132 | _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs) | |
133 |
|
133 | |||
134 | default_target_branch = { |
|
134 | default_target_branch = { | |
135 | 'git': 'master', |
|
135 | 'git': 'master', | |
136 | 'hg': 'default' |
|
136 | 'hg': 'default' | |
137 | }.get(vcs) |
|
137 | }.get(vcs) | |
138 |
|
138 | |||
139 | target_branch = target_branch or default_target_branch |
|
139 | target_branch = target_branch or default_target_branch | |
140 |
|
140 | |||
141 | # PUSH it back |
|
141 | # PUSH it back | |
142 | stdout = stderr = None |
|
142 | stdout = stderr = None | |
143 | if vcs == 'hg': |
|
143 | if vcs == 'hg': | |
144 | maybe_new_branch = '' |
|
144 | maybe_new_branch = '' | |
145 | if new_branch: |
|
145 | if new_branch: | |
146 | maybe_new_branch = '--new-branch' |
|
146 | maybe_new_branch = '--new-branch' | |
147 | stdout, stderr = Command(cwd).execute( |
|
147 | stdout, stderr = Command(cwd).execute( | |
148 | 'hg push --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url) |
|
148 | 'hg push --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url) | |
149 | ) |
|
149 | ) | |
150 | elif vcs == 'git': |
|
150 | elif vcs == 'git': | |
151 | stdout, stderr = Command(cwd).execute( |
|
151 | stdout, stderr = Command(cwd).execute( | |
152 | """{} && |
|
152 | """{} && | |
153 | git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch) |
|
153 | git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch) | |
154 | ) |
|
154 | ) | |
155 |
|
155 | |||
156 | return stdout, stderr |
|
156 | return stdout, stderr | |
157 |
|
157 | |||
158 |
|
158 | |||
159 | def _check_proper_git_push( |
|
159 | def _check_proper_git_push( | |
160 | stdout, stderr, branch='master', should_set_default_branch=False): |
|
160 | stdout, stderr, branch='master', should_set_default_branch=False): | |
161 | # Note: Git is writing most information to stderr intentionally |
|
161 | # Note: Git is writing most information to stderr intentionally | |
162 | assert 'fatal' not in stderr |
|
162 | assert 'fatal' not in stderr | |
163 | assert 'rejected' not in stderr |
|
163 | assert 'rejected' not in stderr | |
164 | assert 'Pushing to' in stderr |
|
164 | assert 'Pushing to' in stderr | |
165 | assert '%s -> %s' % (branch, branch) in stderr |
|
165 | assert '%s -> %s' % (branch, branch) in stderr | |
166 |
|
166 | |||
167 | if should_set_default_branch: |
|
167 | if should_set_default_branch: | |
168 | assert "Setting default branch to %s" % branch in stderr |
|
168 | assert "Setting default branch to %s" % branch in stderr | |
169 | else: |
|
169 | else: | |
170 | assert "Setting default branch" not in stderr |
|
170 | assert "Setting default branch" not in stderr | |
171 |
|
171 | |||
172 |
|
172 | |||
173 | def _check_proper_hg_push(stdout, stderr, branch='default'): |
|
173 | def _check_proper_hg_push(stdout, stderr, branch='default'): | |
174 | assert 'pushing to' in stdout |
|
174 | assert 'pushing to' in stdout | |
175 | assert 'searching for changes' in stdout |
|
175 | assert 'searching for changes' in stdout | |
176 |
|
176 | |||
177 | assert 'abort:' not in stderr |
|
177 | assert 'abort:' not in stderr | |
178 |
|
178 | |||
179 |
|
179 | |||
180 | def _check_proper_clone(stdout, stderr, vcs): |
|
180 | def _check_proper_clone(stdout, stderr, vcs): | |
181 | if vcs == 'hg': |
|
181 | if vcs == 'hg': | |
182 | assert 'requesting all changes' in stdout |
|
182 | assert 'requesting all changes' in stdout | |
183 | assert 'adding changesets' in stdout |
|
183 | assert 'adding changesets' in stdout | |
184 | assert 'adding manifests' in stdout |
|
184 | assert 'adding manifests' in stdout | |
185 | assert 'adding file changes' in stdout |
|
185 | assert 'adding file changes' in stdout | |
186 |
|
186 | |||
187 | assert stderr == '' |
|
187 | assert stderr == '' | |
188 |
|
188 | |||
189 | if vcs == 'git': |
|
189 | if vcs == 'git': | |
190 | assert '' == stdout |
|
190 | assert '' == stdout | |
191 | assert 'Cloning into' in stderr |
|
191 | assert 'Cloning into' in stderr | |
192 | assert 'abort:' not in stderr |
|
192 | assert 'abort:' not in stderr | |
193 | assert 'fatal:' not in stderr |
|
193 | assert 'fatal:' not in stderr |
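
To make the Command helper in the diff above easier to follow, here is a self-contained sketch of the same subprocess-wrapper pattern written against the standard-library subprocess module (the test suite itself uses subprocess32 under Python 2). The class name ShellCommand and the example command are assumptions for illustration only; the COV_CORE_ scrubbing mirrors the comment in the original helper, and the closing note restates why the push/clone assertions above inspect stderr rather than stdout.

# Minimal sketch of the Command pattern, assuming stdlib subprocess is acceptable.
import logging
import os
import subprocess

log = logging.getLogger(__name__)


class ShellCommand(object):
    """Run a shell command in a working directory and capture both streams."""

    def __init__(self, cwd):
        self.cwd = cwd
        self.process = None

    def execute(self, cmd, *args):
        command = cmd + ' ' + ' '.join(args)
        log.debug('*** CMD %s ***', command)

        # Drop coverage variables so they do not leak into hg/git subprocesses,
        # mirroring the COV_CORE_ cleanup in the original helper.
        env = {k: v for k, v in os.environ.items()
               if not k.startswith('COV_CORE_')}

        self.process = subprocess.Popen(
            command, shell=True, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE, cwd=self.cwd, env=env)
        stdout, stderr = self.process.communicate()
        return stdout, stderr


if __name__ == '__main__':
    out, err = ShellCommand('/tmp').execute('git --version')
    # git writes most progress information to stderr, which is why
    # _check_proper_git_push and _check_proper_clone assert on stderr.
    print(out, err)
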
@@ -1,342 +1,345 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | py.test config for test suite for making push/pull operations. |
|
22 | py.test config for test suite for making push/pull operations. | |
23 |
|
23 | |||
24 | .. important:: |
|
24 | .. important:: | |
25 |
|
25 | |||
26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started |
|
26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started | |
27 | to redirect things to stderr instead of stdout. |
|
27 | to redirect things to stderr instead of stdout. | |
28 | """ |
|
28 | """ | |
29 |
|
29 | |||
30 | import os |
|
30 | import os | |
31 | import tempfile |
|
31 | import tempfile | |
32 | import textwrap |
|
32 | import textwrap | |
33 | import pytest |
|
33 | import pytest | |
|
34 | import logging | |||
34 |
|
35 | |||
35 | from rhodecode import events |
|
36 | from rhodecode import events | |
36 | from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \ |
|
37 | from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \ | |
37 | UserToRepoBranchPermission, User |
|
38 | UserToRepoBranchPermission, User | |
38 | from rhodecode.model.integration import IntegrationModel |
|
39 | from rhodecode.model.integration import IntegrationModel | |
39 | from rhodecode.model.db import Repository |
|
40 | from rhodecode.model.db import Repository | |
40 | from rhodecode.model.meta import Session |
|
41 | from rhodecode.model.meta import Session | |
41 | from rhodecode.model.settings import SettingsModel |
|
42 | from rhodecode.model.settings import SettingsModel | |
42 | from rhodecode.integrations.types.webhook import WebhookIntegrationType |
|
43 | from rhodecode.integrations.types.webhook import WebhookIntegrationType | |
43 |
|
44 | |||
44 | from rhodecode.tests import GIT_REPO, HG_REPO |
|
45 | from rhodecode.tests import GIT_REPO, HG_REPO | |
45 | from rhodecode.tests.fixture import Fixture |
|
46 | from rhodecode.tests.fixture import Fixture | |
46 | from rhodecode.tests.server_utils import RcWebServer |
|
47 | from rhodecode.tests.server_utils import RcWebServer | |
47 |
|
48 | |||
48 | REPO_GROUP = 'a_repo_group' |
|
49 | REPO_GROUP = 'a_repo_group' | |
49 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) |
|
50 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) | |
50 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) |
|
51 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) | |
51 |
|
52 | |||
|
53 | log = logging.getLogger(__name__) | |||
|
54 | ||||
52 |
|
55 | |||
53 | @pytest.fixture(scope="module") |
|
56 | @pytest.fixture(scope="module") | |
54 | def rcextensions(request, db_connection, tmpdir_factory): |
|
57 | def rcextensions(request, db_connection, tmpdir_factory): | |
55 | """ |
|
58 | """ | |
56 | Installs a testing rcextensions pack to ensure they work as expected. |
|
59 | Installs a testing rcextensions pack to ensure they work as expected. | |
57 | """ |
|
60 | """ | |
58 | init_content = textwrap.dedent(""" |
|
61 | init_content = textwrap.dedent(""" | |
59 | # Forward import the example rcextensions to make it |
|
62 | # Forward import the example rcextensions to make it | |
60 | # active for our tests. |
|
63 | # active for our tests. | |
61 | from rhodecode.tests.other.example_rcextensions import * |
|
64 | from rhodecode.tests.other.example_rcextensions import * | |
62 | """) |
|
65 | """) | |
63 |
|
66 | |||
64 | # Note: rcextensions are looked up based on the path of the ini file |
|
67 | # Note: rcextensions are looked up based on the path of the ini file | |
65 | root_path = tmpdir_factory.getbasetemp() |
|
68 | root_path = tmpdir_factory.getbasetemp() | |
66 | rcextensions_path = root_path.join('rcextensions') |
|
69 | rcextensions_path = root_path.join('rcextensions') | |
67 | init_path = rcextensions_path.join('__init__.py') |
|
70 | init_path = rcextensions_path.join('__init__.py') | |
68 |
|
71 | |||
69 | if rcextensions_path.check(): |
|
72 | if rcextensions_path.check(): | |
70 | pytest.fail( |
|
73 | pytest.fail( | |
71 | "Path for rcextensions already exists, please clean up before " |
|
74 | "Path for rcextensions already exists, please clean up before " | |
72 | "test run this path: %s" % (rcextensions_path, )) |
|
75 | "test run this path: %s" % (rcextensions_path, )) | |
73 | return |
|
76 | return | |
74 |
|
77 | |||
75 | request.addfinalizer(rcextensions_path.remove) |
|
78 | request.addfinalizer(rcextensions_path.remove) | |
76 | init_path.write_binary(init_content, ensure=True) |
|
79 | init_path.write_binary(init_content, ensure=True) | |
77 |
|
80 | |||
78 |
|
81 | |||
79 | @pytest.fixture(scope="module") |
|
82 | @pytest.fixture(scope="module") | |
80 | def repos(request, db_connection): |
|
83 | def repos(request, db_connection): | |
81 | """Create a copy of each test repo in a repo group.""" |
|
84 | """Create a copy of each test repo in a repo group.""" | |
82 | fixture = Fixture() |
|
85 | fixture = Fixture() | |
83 | repo_group = fixture.create_repo_group(REPO_GROUP) |
|
86 | repo_group = fixture.create_repo_group(REPO_GROUP) | |
84 | repo_group_id = repo_group.group_id |
|
87 | repo_group_id = repo_group.group_id | |
85 | fixture.create_fork(HG_REPO, HG_REPO, |
|
88 | fixture.create_fork(HG_REPO, HG_REPO, | |
86 | repo_name_full=HG_REPO_WITH_GROUP, |
|
89 | repo_name_full=HG_REPO_WITH_GROUP, | |
87 | repo_group=repo_group_id) |
|
90 | repo_group=repo_group_id) | |
88 | fixture.create_fork(GIT_REPO, GIT_REPO, |
|
91 | fixture.create_fork(GIT_REPO, GIT_REPO, | |
89 | repo_name_full=GIT_REPO_WITH_GROUP, |
|
92 | repo_name_full=GIT_REPO_WITH_GROUP, | |
90 | repo_group=repo_group_id) |
|
93 | repo_group=repo_group_id) | |
91 |
|
94 | |||
92 | @request.addfinalizer |
|
95 | @request.addfinalizer | |
93 | def cleanup(): |
|
96 | def cleanup(): | |
94 | fixture.destroy_repo(HG_REPO_WITH_GROUP) |
|
97 | fixture.destroy_repo(HG_REPO_WITH_GROUP) | |
95 | fixture.destroy_repo(GIT_REPO_WITH_GROUP) |
|
98 | fixture.destroy_repo(GIT_REPO_WITH_GROUP) | |
96 | fixture.destroy_repo_group(repo_group_id) |
|
99 | fixture.destroy_repo_group(repo_group_id) | |
97 |
|
100 | |||
98 |
|
101 | |||
99 | @pytest.fixture(scope="module") |
|
102 | @pytest.fixture(scope="module") | |
100 | def rc_web_server_config_modification(): |
|
103 | def rc_web_server_config_modification(): | |
101 | return [] |
|
104 | return [] | |
102 |
|
105 | |||
103 |
|
106 | |||
104 | @pytest.fixture(scope="module") |
|
107 | @pytest.fixture(scope="module") | |
105 | def rc_web_server_config_factory(testini_factory, rc_web_server_config_modification): |
|
108 | def rc_web_server_config_factory(testini_factory, rc_web_server_config_modification): | |
106 | """ |
|
109 | """ | |
107 | Configuration file used for the fixture `rc_web_server`. |
|
110 | Configuration file used for the fixture `rc_web_server`. | |
108 | """ |
|
111 | """ | |
109 |
|
112 | |||
110 | def factory(rcweb_port, vcsserver_port): |
|
113 | def factory(rcweb_port, vcsserver_port): | |
111 | custom_params = [ |
|
114 | custom_params = [ | |
112 | {'handler_console': {'level': 'DEBUG'}}, |
|
115 | {'handler_console': {'level': 'DEBUG'}}, | |
113 | {'server:main': {'port': rcweb_port}}, |
|
116 | {'server:main': {'port': rcweb_port}}, | |
114 | {'app:main': {'vcs.server': 'localhost:%s' % vcsserver_port}} |
|
117 | {'app:main': {'vcs.server': 'localhost:%s' % vcsserver_port}} | |
115 | ] |
|
118 | ] | |
116 | custom_params.extend(rc_web_server_config_modification) |
|
119 | custom_params.extend(rc_web_server_config_modification) | |
117 | return testini_factory(custom_params) |
|
120 | return testini_factory(custom_params) | |
118 | return factory |
|
121 | return factory | |
119 |
|
122 | |||
120 |
|
123 | |||
121 | @pytest.fixture(scope="module") |
|
124 | @pytest.fixture(scope="module") | |
122 | def rc_web_server( |
|
125 | def rc_web_server( | |
123 | request, vcsserver_factory, available_port_factory, |
|
126 | request, vcsserver_factory, available_port_factory, | |
124 | rc_web_server_config_factory, repos, rcextensions): |
|
127 | rc_web_server_config_factory, repos, rcextensions): | |
125 | """ |
|
128 | """ | |
126 | Run the web server as a subprocess, with its own instance of vcsserver

129 | Run the web server as a subprocess, with its own instance of vcsserver | 
127 | """ |
|
130 | """ | |
128 | rcweb_port = available_port_factory() |
|
131 | rcweb_port = available_port_factory() | |
129 |
|
|
132 | log.info('Using rcweb ops test port {}'.format(rcweb_port)) | |
130 |
|
133 | |||
131 | vcsserver_port = available_port_factory() |
|
134 | vcsserver_port = available_port_factory() | |
132 |
|
|
135 | log.info('Using vcsserver ops test port {}'.format(vcsserver_port)) | |
133 |
|
136 | |||
134 | vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log') |
|
137 | vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log') | |
135 | vcsserver_factory( |
|
138 | vcsserver_factory( | |
136 | request, vcsserver_port=vcsserver_port, |
|
139 | request, vcsserver_port=vcsserver_port, | |
137 | log_file=vcs_log, |
|
140 | log_file=vcs_log, | |
138 | overrides=( |
|
141 | overrides=( | |
139 | {'server:main': {'workers': 2}}, |
|
142 | {'server:main': {'workers': 2}}, | |
140 | {'server:main': {'graceful_timeout': 10}}, |
|
143 | {'server:main': {'graceful_timeout': 10}}, | |
141 | )) |
|
144 | )) | |
142 |
|
145 | |||
143 | rc_log = os.path.join(tempfile.gettempdir(), 'rc_op_web.log') |
|
146 | rc_log = os.path.join(tempfile.gettempdir(), 'rc_op_web.log') | |
144 | rc_web_server_config = rc_web_server_config_factory( |
|
147 | rc_web_server_config = rc_web_server_config_factory( | |
145 | rcweb_port=rcweb_port, |
|
148 | rcweb_port=rcweb_port, | |
146 | vcsserver_port=vcsserver_port) |
|
149 | vcsserver_port=vcsserver_port) | |
147 | server = RcWebServer(rc_web_server_config, log_file=rc_log) |
|
150 | server = RcWebServer(rc_web_server_config, log_file=rc_log) | |
148 | server.start() |
|
151 | server.start() | |
149 |
|
152 | |||
150 | @request.addfinalizer |
|
153 | @request.addfinalizer | |
151 | def cleanup(): |
|
154 | def cleanup(): | |
152 | server.shutdown() |
|
155 | server.shutdown() | |
153 |
|
156 | |||
154 | server.wait_until_ready() |
|
157 | server.wait_until_ready() | |
155 | return server |
|
158 | return server | |
156 |
|
159 | |||
157 |
|
160 | |||
158 | @pytest.fixture() |
|
161 | @pytest.fixture() | |
159 | def disable_locking(baseapp): |
|
162 | def disable_locking(baseapp): | |
160 | r = Repository.get_by_repo_name(GIT_REPO) |
|
163 | r = Repository.get_by_repo_name(GIT_REPO) | |
161 | Repository.unlock(r) |
|
164 | Repository.unlock(r) | |
162 | r.enable_locking = False |
|
165 | r.enable_locking = False | |
163 | Session().add(r) |
|
166 | Session().add(r) | |
164 | Session().commit() |
|
167 | Session().commit() | |
165 |
|
168 | |||
166 | r = Repository.get_by_repo_name(HG_REPO) |
|
169 | r = Repository.get_by_repo_name(HG_REPO) | |
167 | Repository.unlock(r) |
|
170 | Repository.unlock(r) | |
168 | r.enable_locking = False |
|
171 | r.enable_locking = False | |
169 | Session().add(r) |
|
172 | Session().add(r) | |
170 | Session().commit() |
|
173 | Session().commit() | |
171 |
|
174 | |||
172 |
|
175 | |||
173 | @pytest.fixture() |
|
176 | @pytest.fixture() | |
174 | def enable_auth_plugins(request, baseapp, csrf_token): |
|
177 | def enable_auth_plugins(request, baseapp, csrf_token): | |
175 | """ |
|
178 | """ | |
176 | Return a factory object that, when called, controls which

179 | Return a factory object that, when called, controls which | 
177 | authentication plugins are enabled. |
|
180 | authentication plugins are enabled. | |
178 | """ |
|
181 | """ | |
179 | def _enable_plugins(plugins_list, override=None): |
|
182 | def _enable_plugins(plugins_list, override=None): | |
180 | override = override or {} |
|
183 | override = override or {} | |
181 | params = { |
|
184 | params = { | |
182 | 'auth_plugins': ','.join(plugins_list), |
|
185 | 'auth_plugins': ','.join(plugins_list), | |
183 | } |
|
186 | } | |
184 |
|
187 | |||
185 | # helper translate some names to others |
|
188 | # helper translate some names to others | |
186 | name_map = { |
|
189 | name_map = { | |
187 | 'token': 'authtoken' |
|
190 | 'token': 'authtoken' | |
188 | } |
|
191 | } | |
189 |
|
192 | |||
190 | for module in plugins_list: |
|
193 | for module in plugins_list: | |
191 | plugin_name = module.partition('#')[-1] |
|
194 | plugin_name = module.partition('#')[-1] | |
192 | if plugin_name in name_map: |
|
195 | if plugin_name in name_map: | |
193 | plugin_name = name_map[plugin_name] |
|
196 | plugin_name = name_map[plugin_name] | |
194 | enabled_plugin = 'auth_%s_enabled' % plugin_name |
|
197 | enabled_plugin = 'auth_%s_enabled' % plugin_name | |
195 | cache_ttl = 'auth_%s_cache_ttl' % plugin_name |
|
198 | cache_ttl = 'auth_%s_cache_ttl' % plugin_name | |
196 |
|
199 | |||
197 | # default params that are needed for each plugin, |
|
200 | # default params that are needed for each plugin, | |
198 | # `enabled` and `cache_ttl` |
|
201 | # `enabled` and `cache_ttl` | |
199 | params.update({ |
|
202 | params.update({ | |
200 | enabled_plugin: True, |
|
203 | enabled_plugin: True, | |
201 | cache_ttl: 0 |
|
204 | cache_ttl: 0 | |
202 | }) |
|
205 | }) | |
203 | if override.get: |
|
206 | if override.get: | |
204 | params.update(override.get(module, {})) |
|
207 | params.update(override.get(module, {})) | |
205 |
|
208 | |||
206 | validated_params = params |
|
209 | validated_params = params | |
207 | for k, v in validated_params.items(): |
|
210 | for k, v in validated_params.items(): | |
208 | setting = SettingsModel().create_or_update_setting(k, v) |
|
211 | setting = SettingsModel().create_or_update_setting(k, v) | |
209 | Session().add(setting) |
|
212 | Session().add(setting) | |
210 | Session().commit() |
|
213 | Session().commit() | |
211 |
|
214 | |||
212 | SettingsModel().invalidate_settings_cache() |
|
215 | SettingsModel().invalidate_settings_cache() | |
213 |
|
216 | |||
214 | def cleanup(): |
|
217 | def cleanup(): | |
215 | _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode']) |
|
218 | _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode']) | |
216 |
|
219 | |||
217 | request.addfinalizer(cleanup) |
|
220 | request.addfinalizer(cleanup) | |
218 |
|
221 | |||
219 | return _enable_plugins |
|
222 | return _enable_plugins | |
220 |
|
223 | |||
221 |
|
224 | |||
222 | @pytest.fixture() |
|
225 | @pytest.fixture() | |
223 | def fs_repo_only(request, rhodecode_fixtures): |
|
226 | def fs_repo_only(request, rhodecode_fixtures): | |
224 | def fs_repo_fabric(repo_name, repo_type): |
|
227 | def fs_repo_fabric(repo_name, repo_type): | |
225 | rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type) |
|
228 | rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type) | |
226 | rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False) |
|
229 | rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False) | |
227 |
|
230 | |||
228 | def cleanup(): |
|
231 | def cleanup(): | |
229 | rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True) |
|
232 | rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True) | |
230 | rhodecode_fixtures.destroy_repo_on_filesystem(repo_name) |
|
233 | rhodecode_fixtures.destroy_repo_on_filesystem(repo_name) | |
231 |
|
234 | |||
232 | request.addfinalizer(cleanup) |
|
235 | request.addfinalizer(cleanup) | |
233 |
|
236 | |||
234 | return fs_repo_fabric |
|
237 | return fs_repo_fabric | |
235 |
|
238 | |||
236 |
|
239 | |||
237 | @pytest.fixture() |
|
240 | @pytest.fixture() | |
238 | def enable_webhook_push_integration(request): |
|
241 | def enable_webhook_push_integration(request): | |
239 | integration = Integration() |
|
242 | integration = Integration() | |
240 | integration.integration_type = WebhookIntegrationType.key |
|
243 | integration.integration_type = WebhookIntegrationType.key | |
241 | Session().add(integration) |
|
244 | Session().add(integration) | |
242 |
|
245 | |||
243 | settings = dict( |
|
246 | settings = dict( | |
244 | url='http://httpbin.org/post', |
|
247 | url='http://httpbin.org/post', | |
245 | secret_token='secret', |
|
248 | secret_token='secret', | |
246 | username=None, |
|
249 | username=None, | |
247 | password=None, |
|
250 | password=None, | |
248 | custom_header_key=None, |
|
251 | custom_header_key=None, | |
249 | custom_header_val=None, |
|
252 | custom_header_val=None, | |
250 | method_type='post', |
|
253 | method_type='post', | |
251 | events=[events.RepoPushEvent.name], |
|
254 | events=[events.RepoPushEvent.name], | |
252 | log_data=True |
|
255 | log_data=True | |
253 | ) |
|
256 | ) | |
254 |
|
257 | |||
255 | IntegrationModel().update_integration( |
|
258 | IntegrationModel().update_integration( | |
256 | integration, |
|
259 | integration, | |
257 | name='IntegrationWebhookTest', |
|
260 | name='IntegrationWebhookTest', | |
258 | enabled=True, |
|
261 | enabled=True, | |
259 | settings=settings, |
|
262 | settings=settings, | |
260 | repo=None, |
|
263 | repo=None, | |
261 | repo_group=None, |
|
264 | repo_group=None, | |
262 | child_repos_only=False, |
|
265 | child_repos_only=False, | |
263 | ) |
|
266 | ) | |
264 | Session().commit() |
|
267 | Session().commit() | |
265 | integration_id = integration.integration_id |
|
268 | integration_id = integration.integration_id | |
266 |
|
269 | |||
267 | @request.addfinalizer |
|
270 | @request.addfinalizer | |
268 | def cleanup(): |
|
271 | def cleanup(): | |
269 | integration = Integration.get(integration_id) |
|
272 | integration = Integration.get(integration_id) | |
270 | Session().delete(integration) |
|
273 | Session().delete(integration) | |
271 | Session().commit() |
|
274 | Session().commit() | |
272 |
|
275 | |||
273 |
|
276 | |||
274 | @pytest.fixture() |
|
277 | @pytest.fixture() | |
275 | def branch_permission_setter(request): |
|
278 | def branch_permission_setter(request): | |
276 | """ |
|
279 | """ | |
277 |
|
280 | |||
278 | def my_test(branch_permission_setter):

281 | def my_test(branch_permission_setter): | 
279 | branch_permission_setter(repo_name, username, pattern='*', permission='branch.push') |
|
282 | branch_permission_setter(repo_name, username, pattern='*', permission='branch.push') | |
280 |
|
283 | |||
281 | """ |
|
284 | """ | |
282 |
|
285 | |||
283 | rule_id = None |
|
286 | rule_id = None | |
284 | write_perm_id = None |
|
287 | write_perm_id = None | |
285 | write_perm = None |
|
288 | write_perm = None | |
286 | rule = None |
|
289 | rule = None | |
287 |
|
290 | |||
288 | def _branch_permissions_setter( |
|
291 | def _branch_permissions_setter( | |
289 | repo_name, username, pattern='*', permission='branch.push_force'): |
|
292 | repo_name, username, pattern='*', permission='branch.push_force'): | |
290 | global rule_id, write_perm_id |
|
293 | global rule_id, write_perm_id | |
291 | global rule, write_perm |
|
294 | global rule, write_perm | |
292 |
|
295 | |||
293 | repo = Repository.get_by_repo_name(repo_name) |
|
296 | repo = Repository.get_by_repo_name(repo_name) | |
294 | repo_id = repo.repo_id |
|
297 | repo_id = repo.repo_id | |
295 |
|
298 | |||
296 | user = User.get_by_username(username) |
|
299 | user = User.get_by_username(username) | |
297 | user_id = user.user_id |
|
300 | user_id = user.user_id | |
298 |
|
301 | |||
299 | rule_perm_obj = Permission.get_by_key(permission) |
|
302 | rule_perm_obj = Permission.get_by_key(permission) | |
300 |
|
303 | |||
301 | # add new entry, based on existing perm entry |
|
304 | # add new entry, based on existing perm entry | |
302 | perm = UserRepoToPerm.query() \ |
|
305 | perm = UserRepoToPerm.query() \ | |
303 | .filter(UserRepoToPerm.repository_id == repo_id) \ |
|
306 | .filter(UserRepoToPerm.repository_id == repo_id) \ | |
304 | .filter(UserRepoToPerm.user_id == user_id) \ |
|
307 | .filter(UserRepoToPerm.user_id == user_id) \ | |
305 | .first() |
|
308 | .first() | |
306 |
|
309 | |||
307 | if not perm: |
|
310 | if not perm: | |
308 | # such user isn't defined in Permissions for repository |
|
311 | # such user isn't defined in Permissions for repository | |
309 | # we now on-the-fly add new permission |
|
312 | # we now on-the-fly add new permission | |
310 |
|
313 | |||
311 | write_perm = UserRepoToPerm() |
|
314 | write_perm = UserRepoToPerm() | |
312 | write_perm.permission = Permission.get_by_key('repository.write') |
|
315 | write_perm.permission = Permission.get_by_key('repository.write') | |
313 | write_perm.repository_id = repo_id |
|
316 | write_perm.repository_id = repo_id | |
314 | write_perm.user_id = user_id |
|
317 | write_perm.user_id = user_id | |
315 | Session().add(write_perm) |
|
318 | Session().add(write_perm) | |
316 | Session().flush() |
|
319 | Session().flush() | |
317 |
|
320 | |||
318 | perm = write_perm |
|
321 | perm = write_perm | |
319 |
|
322 | |||
320 | rule = UserToRepoBranchPermission() |
|
323 | rule = UserToRepoBranchPermission() | |
321 | rule.rule_to_perm_id = perm.repo_to_perm_id |
|
324 | rule.rule_to_perm_id = perm.repo_to_perm_id | |
322 | rule.branch_pattern = pattern |
|
325 | rule.branch_pattern = pattern | |
323 | rule.rule_order = 10 |
|
326 | rule.rule_order = 10 | |
324 | rule.permission = rule_perm_obj |
|
327 | rule.permission = rule_perm_obj | |
325 | rule.repository_id = repo_id |
|
328 | rule.repository_id = repo_id | |
326 | Session().add(rule) |
|
329 | Session().add(rule) | |
327 | Session().commit() |
|
330 | Session().commit() | |
328 |
|
331 | |||
329 | return rule |
|
332 | return rule | |
330 |
|
333 | |||
331 | @request.addfinalizer |
|
334 | @request.addfinalizer | |
332 | def cleanup(): |
|
335 | def cleanup(): | |
333 | if rule: |
|
336 | if rule: | |
334 | Session().delete(rule) |
|
337 | Session().delete(rule) | |
335 | Session().commit() |
|
338 | Session().commit() | |
336 | if write_perm: |
|
339 | if write_perm: | |
337 | Session().delete(write_perm) |
|
340 | Session().delete(write_perm) | |
338 | Session().commit() |
|
341 | Session().commit() | |
339 |
|
342 | |||
340 | return _branch_permissions_setter |
|
343 | return _branch_permissions_setter | |
341 |
|
344 | |||
342 |
|
345 |
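
The fixtures in this conftest repeatedly use the same shape: the fixture returns a setter/factory callable and registers cleanup through request.addfinalizer (see enable_webhook_push_integration and branch_permission_setter above). Below is a self-contained pytest sketch of that pattern under stated assumptions: the in-memory RULES dict stands in for the SQLAlchemy Session() and model calls, and the names branch_rule_setter and test_push_rule are hypothetical, not part of the suite.

# Sketch of the "setter factory plus finalizer" fixture pattern, assuming an
# in-memory store instead of the real database-backed models.
import pytest

RULES = {}  # stand-in for database-backed branch permission rules


@pytest.fixture()
def branch_rule_setter(request):
    created = []

    def _set_rule(repo_name, username, pattern='*', permission='branch.push_force'):
        rule_key = (repo_name, username, pattern)
        RULES[rule_key] = permission
        created.append(rule_key)
        return rule_key

    @request.addfinalizer
    def cleanup():
        # Remove only the rules this test created, analogous to the
        # Session().delete() calls in the real fixture's cleanup.
        for rule_key in created:
            RULES.pop(rule_key, None)

    return _set_rule


def test_push_rule(branch_rule_setter):
    branch_rule_setter('vcs_test_git', 'test_regular', pattern='*', permission='branch.push')
    assert RULES[('vcs_test_git', 'test_regular', '*')] == 'branch.push'
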