@@ -1,213 +1,212 @@
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2010-2018 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 import pytest
 
 from rhodecode.lib.vcs.nodes import FileNode
 from rhodecode.model.db import User
 from rhodecode.model.pull_request import PullRequestModel
 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_ok, assert_error)
 
 
 @pytest.mark.usefixtures("testuser_api", "app")
 class TestUpdatePullRequest(object):
 
     @pytest.mark.backends("git", "hg")
     def test_api_update_pull_request_title_or_description(
             self, pr_util, no_notifications):
         pull_request = pr_util.create_pull_request()
 
         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             title='New TITLE OF A PR',
             description='New DESC OF A PR',
         )
         response = api_call(self.app, params)
 
         expected = {
             "msg": "Updated pull request `{}`".format(
                 pull_request.pull_request_id),
             "pull_request": response.json['result']['pull_request'],
             "updated_commits": {"added": [], "common": [], "removed": []},
             "updated_reviewers": {"added": [], "removed": []},
         }
 
         response_json = response.json['result']
         assert response_json == expected
         pr = response_json['pull_request']
         assert pr['title'] == 'New TITLE OF A PR'
         assert pr['description'] == 'New DESC OF A PR'
 
     @pytest.mark.backends("git", "hg")
     def test_api_try_update_closed_pull_request(
             self, pr_util, no_notifications):
         pull_request = pr_util.create_pull_request()
         PullRequestModel().close_pull_request(
             pull_request, TEST_USER_ADMIN_LOGIN)
 
         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id)
         response = api_call(self.app, params)
 
         expected = 'pull request `{}` update failed, pull request ' \
                    'is closed'.format(pull_request.pull_request_id)
 
         assert_error(id_, expected, response.body)
 
     @pytest.mark.backends("git", "hg")
     def test_api_update_update_commits(self, pr_util, no_notifications):
         commits = [
             {'message': 'a'},
             {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
             {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
         ]
         pull_request = pr_util.create_pull_request(
             commits=commits, target_head='a', source_head='b', revisions=['b'])
         pr_util.update_source_repository(head='c')
         repo = pull_request.source_repo.scm_instance()
         commits = [x for x in repo.get_commits()]
-        print commits
 
         added_commit_id = commits[-1].raw_id  # c commit
         common_commit_id = commits[1].raw_id  # b commit is common ancestor
         total_commits = [added_commit_id, common_commit_id]
 
         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             update_commits=True
         )
         response = api_call(self.app, params)
 
         expected = {
             "msg": "Updated pull request `{}`".format(
                 pull_request.pull_request_id),
             "pull_request": response.json['result']['pull_request'],
             "updated_commits": {"added": [added_commit_id],
                                 "common": [common_commit_id],
                                 "total": total_commits,
                                 "removed": []},
             "updated_reviewers": {"added": [], "removed": []},
         }
 
         assert_ok(id_, expected, response.body)
 
     @pytest.mark.backends("git", "hg")
     def test_api_update_change_reviewers(
             self, user_util, pr_util, no_notifications):
         a = user_util.create_user()
         b = user_util.create_user()
         c = user_util.create_user()
         new_reviewers = [
             {'username': b.username,'reasons': ['updated via API'],
              'mandatory':False},
             {'username': c.username, 'reasons': ['updated via API'],
              'mandatory':False},
         ]
 
         added = [b.username, c.username]
         removed = [a.username]
 
         pull_request = pr_util.create_pull_request(
             reviewers=[(a.username, ['added via API'], False, [])])
 
         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             reviewers=new_reviewers)
         response = api_call(self.app, params)
         expected = {
             "msg": "Updated pull request `{}`".format(
                 pull_request.pull_request_id),
             "pull_request": response.json['result']['pull_request'],
             "updated_commits": {"added": [], "common": [], "removed": []},
             "updated_reviewers": {"added": added, "removed": removed},
         }
 
         assert_ok(id_, expected, response.body)
 
     @pytest.mark.backends("git", "hg")
     def test_api_update_bad_user_in_reviewers(self, pr_util):
         pull_request = pr_util.create_pull_request()
 
         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             reviewers=[{'username': 'bad_name'}])
         response = api_call(self.app, params)
 
         expected = 'user `bad_name` does not exist'
 
         assert_error(id_, expected, response.body)
 
     @pytest.mark.backends("git", "hg")
     def test_api_update_repo_error(self, pr_util):
         pull_request = pr_util.create_pull_request()
         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid='fake',
             pullrequestid=pull_request.pull_request_id,
             reviewers=[{'username': 'bad_name'}])
         response = api_call(self.app, params)
 
         expected = 'repository `fake` does not exist'
 
         response_json = response.json['error']
         assert response_json == expected
 
     @pytest.mark.backends("git", "hg")
     def test_api_update_pull_request_error(self, pr_util):
         pull_request = pr_util.create_pull_request()
 
         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=999999,
             reviewers=[{'username': 'bad_name'}])
         response = api_call(self.app, params)
 
         expected = 'pull request `999999` does not exist'
         assert_error(id_, expected, response.body)
 
     @pytest.mark.backends("git", "hg")
     def test_api_update_pull_request_no_perms_to_update(
             self, user_util, pr_util):
         user = user_util.create_user()
         pull_request = pr_util.create_pull_request()
 
         id_, params = build_data(
             user.api_key, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,)
         response = api_call(self.app, params)
 
         expected = ('pull request `%s` update failed, '
                     'no permission to update.') % pull_request.pull_request_id
 
         assert_error(id_, expected, response.body)
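The only change in the hunk above is the removal of the stray debug statement `print commits` from test_api_update_update_commits; every other line is unchanged context. The statement form of print is Python 2-only syntax, so it cannot survive a move to Python 3. A minimal, self-contained sketch (not part of the diff, with hypothetical stand-in data) of the portable form, had the debug output been worth keeping:

    # Make print() a function on Python 2 as well, matching Python 3.
    from __future__ import print_function

    # Stand-in for the commit list the test builds from repo.get_commits().
    commits = ['a1b2c3', 'd4e5f6']

    # The function-call form parses identically on Python 2 and 3; the removed
    # statement form `print commits` is a SyntaxError on Python 3.
    print(commits)

The next hunk applies the same normalization inside DbManage.upgrade(), rewriting `print (` as `print(`.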
@@ -1,621 +1,621 @@
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2010-2018 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 """
 Database creation, and setup module for RhodeCode Enterprise. Used for creation
 of database as well as for migration operations
 """
 
 import os
 import sys
 import time
 import uuid
 import logging
 import getpass
 from os.path import dirname as dn, join as jn
 
 from sqlalchemy.engine import create_engine
 
 from rhodecode import __dbversion__
 from rhodecode.model import init_model
 from rhodecode.model.user import UserModel
 from rhodecode.model.db import (
     User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm,
     DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository)
 from rhodecode.model.meta import Session, Base
 from rhodecode.model.permission import PermissionModel
 from rhodecode.model.repo import RepoModel
 from rhodecode.model.repo_group import RepoGroupModel
 from rhodecode.model.settings import SettingsModel
 
 
 log = logging.getLogger(__name__)
 
 
 def notify(msg):
     """
     Notification for migrations messages
     """
     ml = len(msg) + (4 * 2)
     print(('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper())
 
 
 class DbManage(object):
 
     def __init__(self, log_sql, dbconf, root, tests=False,
                  SESSION=None, cli_args=None):
         self.dbname = dbconf.split('/')[-1]
         self.tests = tests
         self.root = root
         self.dburi = dbconf
         self.log_sql = log_sql
         self.db_exists = False
         self.cli_args = cli_args or {}
         self.init_db(SESSION=SESSION)
         self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask'))
 
     def get_ask_ok_func(self, param):
         if param not in [None]:
             # return a function lambda that has a default set to param
             return lambda *args, **kwargs: param
         else:
             from rhodecode.lib.utils import ask_ok
             return ask_ok
 
     def init_db(self, SESSION=None):
         if SESSION:
             self.sa = SESSION
         else:
             # init new sessions
             engine = create_engine(self.dburi, echo=self.log_sql)
             init_model(engine)
             self.sa = Session()
 
     def create_tables(self, override=False):
         """
         Create a auth database
         """
 
         log.info("Existing database with the same name is going to be destroyed.")
         log.info("Setup command will run DROP ALL command on that database.")
         if self.tests:
             destroy = True
         else:
             destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]')
         if not destroy:
             log.info('Nothing done.')
             sys.exit(0)
         if destroy:
             Base.metadata.drop_all()
 
         checkfirst = not override
         Base.metadata.create_all(checkfirst=checkfirst)
         log.info('Created tables for %s' % self.dbname)
 
     def set_db_version(self):
         ver = DbMigrateVersion()
         ver.version = __dbversion__
         ver.repository_id = 'rhodecode_db_migrations'
         ver.repository_path = 'versions'
         self.sa.add(ver)
         log.info('db version set to: %s' % __dbversion__)
 
     def run_pre_migration_tasks(self):
         """
         Run various tasks before actually doing migrations
         """
         # delete cache keys on each upgrade
         total = CacheKey.query().count()
         log.info("Deleting (%s) cache keys now...", total)
         CacheKey.delete_all_cache()
 
     def upgrade(self, version=None):
         """
         Upgrades given database schema to given revision following
         all needed steps, to perform the upgrade
 
         """
 
         from rhodecode.lib.dbmigrate.migrate.versioning import api
         from rhodecode.lib.dbmigrate.migrate.exceptions import \
             DatabaseNotControlledError
 
         if 'sqlite' in self.dburi:
-            print (
+            print(
                 '********************** WARNING **********************\n'
                 'Make sure your version of sqlite is at least 3.7.X. \n'
                 'Earlier versions are known to fail on some migrations\n'
                 '*****************************************************\n')
 
         upgrade = self.ask_ok(
             'You are about to perform a database upgrade. Make '
             'sure you have backed up your database. '
             'Continue ? [y/n]')
         if not upgrade:
             log.info('No upgrade performed')
             sys.exit(0)
 
         repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
                              'rhodecode/lib/dbmigrate')
         db_uri = self.dburi
 
         try:
             curr_version = version or api.db_version(db_uri, repository_path)
             msg = ('Found current database db_uri under version '
                    'control with version {}'.format(curr_version))
 
         except (RuntimeError, DatabaseNotControlledError):
             curr_version = 1
             msg = ('Current database is not under version control. Setting '
                    'as version %s' % curr_version)
             api.version_control(db_uri, repository_path, curr_version)
 
         notify(msg)
 
         self.run_pre_migration_tasks()
 
         if curr_version == __dbversion__:
             log.info('This database is already at the newest version')
             sys.exit(0)
 
         upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
         notify('attempting to upgrade database from '
                'version %s to version %s' % (curr_version, __dbversion__))
 
         # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
         _step = None
         for step in upgrade_steps:
             notify('performing upgrade step %s' % step)
             time.sleep(0.5)
 
             api.upgrade(db_uri, repository_path, step)
             self.sa.rollback()
             notify('schema upgrade for step %s completed' % (step,))
 
             _step = step
 
         notify('upgrade to version %s successful' % _step)
 
     def fix_repo_paths(self):
         """
         Fixes an old RhodeCode version path into new one without a '*'
         """
 
         paths = self.sa.query(RhodeCodeUi)\
             .filter(RhodeCodeUi.ui_key == '/')\
             .scalar()
 
         paths.ui_value = paths.ui_value.replace('*', '')
 
         try:
             self.sa.add(paths)
             self.sa.commit()
         except Exception:
             self.sa.rollback()
             raise
 
     def fix_default_user(self):
         """
         Fixes an old default user with some 'nicer' default values,
         used mostly for anonymous access
         """
         def_user = self.sa.query(User)\
             .filter(User.username == User.DEFAULT_USER)\
             .one()
 
         def_user.name = 'Anonymous'
         def_user.lastname = 'User'
         def_user.email = User.DEFAULT_USER_EMAIL
 
         try:
             self.sa.add(def_user)
             self.sa.commit()
         except Exception:
             self.sa.rollback()
             raise
 
     def fix_settings(self):
         """
         Fixes rhodecode settings and adds ga_code key for google analytics
         """
 
         hgsettings3 = RhodeCodeSetting('ga_code', '')
 
         try:
             self.sa.add(hgsettings3)
             self.sa.commit()
         except Exception:
             self.sa.rollback()
             raise
 
     def create_admin_and_prompt(self):
 
         # defaults
         defaults = self.cli_args
         username = defaults.get('username')
         password = defaults.get('password')
         email = defaults.get('email')
 
         if username is None:
             username = raw_input('Specify admin username:')
         if password is None:
             password = self._get_admin_password()
             if not password:
                 # second try
                 password = self._get_admin_password()
                 if not password:
                     sys.exit()
         if email is None:
             email = raw_input('Specify admin email:')
         api_key = self.cli_args.get('api_key')
         self.create_user(username, password, email, True,
                          strict_creation_check=False,
                          api_key=api_key)
 
     def _get_admin_password(self):
         password = getpass.getpass('Specify admin password '
                                    '(min 6 chars):')
         confirm = getpass.getpass('Confirm password:')
 
         if password != confirm:
             log.error('passwords mismatch')
             return False
         if len(password) < 6:
             log.error('password is too short - use at least 6 characters')
             return False
 
         return password
 
     def create_test_admin_and_users(self):
         log.info('creating admin and regular test users')
         from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \
             TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
             TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
             TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
             TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
 
         self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
                          TEST_USER_ADMIN_EMAIL, True, api_key=True)
 
         self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
                          TEST_USER_REGULAR_EMAIL, False, api_key=True)
 
         self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
                          TEST_USER_REGULAR2_EMAIL, False, api_key=True)
 
     def create_ui_settings(self, repo_store_path):
         """
         Creates ui settings, fills out hooks
         and disables dotencode
         """
         settings_model = SettingsModel(sa=self.sa)
         from rhodecode.lib.vcs.backends.hg import largefiles_store
         from rhodecode.lib.vcs.backends.git import lfs_store
 
         # Build HOOKS
         hooks = [
             (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'),
 
             # HG
             (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'),
             (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'),
             (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'),
             (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'),
             (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'),
             (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'),
 
         ]
 
         for key, value in hooks:
             hook_obj = settings_model.get_ui_by_key(key)
             hooks2 = hook_obj if hook_obj else RhodeCodeUi()
             hooks2.ui_section = 'hooks'
             hooks2.ui_key = key
             hooks2.ui_value = value
             self.sa.add(hooks2)
 
         # enable largefiles
         largefiles = RhodeCodeUi()
         largefiles.ui_section = 'extensions'
         largefiles.ui_key = 'largefiles'
         largefiles.ui_value = ''
         self.sa.add(largefiles)
 
         # set default largefiles cache dir, defaults to
         # /repo_store_location/.cache/largefiles
         largefiles = RhodeCodeUi()
         largefiles.ui_section = 'largefiles'
         largefiles.ui_key = 'usercache'
         largefiles.ui_value = largefiles_store(repo_store_path)
 
         self.sa.add(largefiles)
 
         # set default lfs cache dir, defaults to
         # /repo_store_location/.cache/lfs_store
         lfsstore = RhodeCodeUi()
         lfsstore.ui_section = 'vcs_git_lfs'
         lfsstore.ui_key = 'store_location'
         lfsstore.ui_value = lfs_store(repo_store_path)
 
         self.sa.add(lfsstore)
 
         # enable hgsubversion disabled by default
         hgsubversion = RhodeCodeUi()
         hgsubversion.ui_section = 'extensions'
         hgsubversion.ui_key = 'hgsubversion'
         hgsubversion.ui_value = ''
         hgsubversion.ui_active = False
         self.sa.add(hgsubversion)
 
         # enable hgevolve disabled by default
         hgevolve = RhodeCodeUi()
         hgevolve.ui_section = 'extensions'
         hgevolve.ui_key = 'evolve'
         hgevolve.ui_value = ''
         hgevolve.ui_active = False
         self.sa.add(hgevolve)
 
         # enable hggit disabled by default
         hggit = RhodeCodeUi()
         hggit.ui_section = 'extensions'
         hggit.ui_key = 'hggit'
         hggit.ui_value = ''
         hggit.ui_active = False
         self.sa.add(hggit)
 
         # set svn branch defaults
         branches = ["/branches/*", "/trunk"]
         tags = ["/tags/*"]
 
         for branch in branches:
             settings_model.create_ui_section_value(
                 RhodeCodeUi.SVN_BRANCH_ID, branch)
 
         for tag in tags:
             settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag)
 
     def create_auth_plugin_options(self, skip_existing=False):
         """
         Create default auth plugin settings, and make it active
 
         :param skip_existing:
         """
 
         for k, v, t in [('auth_plugins', 'egg:rhodecode-enterprise-ce#rhodecode', 'list'),
                         ('auth_rhodecode_enabled', 'True', 'bool')]:
             if (skip_existing and
                     SettingsModel().get_setting_by_name(k) is not None):
                 log.debug('Skipping option %s' % k)
                 continue
             setting = RhodeCodeSetting(k, v, t)
             self.sa.add(setting)
 
     def create_default_options(self, skip_existing=False):
         """Creates default settings"""
 
         for k, v, t in [
                 ('default_repo_enable_locking', False, 'bool'),
                 ('default_repo_enable_downloads', False, 'bool'),
                 ('default_repo_enable_statistics', False, 'bool'),
                 ('default_repo_private', False, 'bool'),
                 ('default_repo_type', 'hg', 'unicode')]:
 
             if (skip_existing and
                     SettingsModel().get_setting_by_name(k) is not None):
                 log.debug('Skipping option %s' % k)
                 continue
             setting = RhodeCodeSetting(k, v, t)
             self.sa.add(setting)
 
     def fixup_groups(self):
         def_usr = User.get_default_user()
         for g in RepoGroup.query().all():
             g.group_name = g.get_new_name(g.name)
             self.sa.add(g)
             # get default perm
             default = UserRepoGroupToPerm.query()\
                 .filter(UserRepoGroupToPerm.group == g)\
                 .filter(UserRepoGroupToPerm.user == def_usr)\
                 .scalar()
 
             if default is None:
                 log.debug('missing default permission for group %s adding' % g)
                 perm_obj = RepoGroupModel()._create_default_perms(g)
                 self.sa.add(perm_obj)
 
     def reset_permissions(self, username):
         """
         Resets permissions to default state, useful when old systems had
         bad permissions, we must clean them up
 
         :param username:
         """
         default_user = User.get_by_username(username)
         if not default_user:
             return
 
         u2p = UserToPerm.query()\
             .filter(UserToPerm.user == default_user).all()
         fixed = False
         if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
             for p in u2p:
                 Session().delete(p)
             fixed = True
             self.populate_default_permissions()
         return fixed
 
     def update_repo_info(self):
         RepoModel.update_repoinfo()
 
     def config_prompt(self, test_repo_path='', retries=3):
         defaults = self.cli_args
         _path = defaults.get('repos_location')
         if retries == 3:
             log.info('Setting up repositories config')
 
         if _path is not None:
             path = _path
         elif not self.tests and not test_repo_path:
             path = raw_input(
                 'Enter a valid absolute path to store repositories. '
                 'All repositories in that path will be added automatically:'
             )
         else:
             path = test_repo_path
         path_ok = True
 
         # check proper dir
         if not os.path.isdir(path):
             path_ok = False
             log.error('Given path %s is not a valid directory' % (path,))
 
         elif not os.path.isabs(path):
             path_ok = False
             log.error('Given path %s is not an absolute path' % (path,))
 
         # check if path is at least readable.
         if not os.access(path, os.R_OK):
             path_ok = False
             log.error('Given path %s is not readable' % (path,))
 
         # check write access, warn user about non writeable paths
         elif not os.access(path, os.W_OK) and path_ok:
             log.warning('No write permission to given path %s' % (path,))
 
             q = ('Given path %s is not writeable, do you want to '
                  'continue with read only mode ? [y/n]' % (path,))
             if not self.ask_ok(q):
                 log.error('Canceled by user')
                 sys.exit(-1)
 
         if retries == 0:
             sys.exit('max retries reached')
         if not path_ok:
             retries -= 1
             return self.config_prompt(test_repo_path, retries)
 
         real_path = os.path.normpath(os.path.realpath(path))
 
         if real_path != os.path.normpath(path):
             q = ('Path looks like a symlink, RhodeCode Enterprise will store '
                  'given path as %s ? [y/n]') % (real_path,)
             if not self.ask_ok(q):
                 log.error('Canceled by user')
                 sys.exit(-1)
 
         return real_path
 
     def create_settings(self, path):
 
         self.create_ui_settings(path)
 
         ui_config = [
             ('web', 'push_ssl', 'False'),
             ('web', 'allow_archive', 'gz zip bz2'),
             ('web', 'allow_push', '*'),
             ('web', 'baseurl', '/'),
             ('paths', '/', path),
             ('phases', 'publish', 'True')
         ]
         for section, key, value in ui_config:
             ui_conf = RhodeCodeUi()
             setattr(ui_conf, 'ui_section', section)
             setattr(ui_conf, 'ui_key', key)
             setattr(ui_conf, 'ui_value', value)
             self.sa.add(ui_conf)
 
         # rhodecode app settings
         settings = [
             ('realm', 'RhodeCode', 'unicode'),
             ('title', '', 'unicode'),
             ('pre_code', '', 'unicode'),
             ('post_code', '', 'unicode'),
             ('show_public_icon', True, 'bool'),
             ('show_private_icon', True, 'bool'),
             ('stylify_metatags', False, 'bool'),
             ('dashboard_items', 100, 'int'),
             ('admin_grid_items', 25, 'int'),
             ('show_version', True, 'bool'),
             ('use_gravatar', False, 'bool'),
             ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
             ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
             ('support_url', '', 'unicode'),
             ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'),
             ('show_revision_number', True, 'bool'),
             ('show_sha_length', 12, 'int'),
         ]
 
         for key, val, type_ in settings:
             sett = RhodeCodeSetting(key, val, type_)
             self.sa.add(sett)
 
         self.create_auth_plugin_options()
         self.create_default_options()
 
         log.info('created ui config')
 
     def create_user(self, username, password, email='', admin=False,
                     strict_creation_check=True, api_key=None):
         log.info('creating user `%s`' % username)
         user = UserModel().create_or_update(
             username, password, email, firstname=u'RhodeCode', lastname=u'Admin',
             active=True, admin=admin, extern_type="rhodecode",
579 | active=True, admin=admin, extern_type="rhodecode", | |
580 | strict_creation_check=strict_creation_check) |
|
580 | strict_creation_check=strict_creation_check) | |
581 |
|
581 | |||
582 | if api_key: |
|
582 | if api_key: | |
583 | log.info('setting a new default auth token for user `%s`', username) |
|
583 | log.info('setting a new default auth token for user `%s`', username) | |
584 | UserModel().add_auth_token( |
|
584 | UserModel().add_auth_token( | |
585 | user=user, lifetime_minutes=-1, |
|
585 | user=user, lifetime_minutes=-1, | |
586 | role=UserModel.auth_token_role.ROLE_ALL, |
|
586 | role=UserModel.auth_token_role.ROLE_ALL, | |
587 | description=u'BUILTIN TOKEN') |
|
587 | description=u'BUILTIN TOKEN') | |
588 |
|
588 | |||
589 | def create_default_user(self): |
|
589 | def create_default_user(self): | |
590 | log.info('creating default user') |
|
590 | log.info('creating default user') | |
591 | # create default user for handling default permissions. |
|
591 | # create default user for handling default permissions. | |
592 | user = UserModel().create_or_update(username=User.DEFAULT_USER, |
|
592 | user = UserModel().create_or_update(username=User.DEFAULT_USER, | |
593 | password=str(uuid.uuid1())[:20], |
|
593 | password=str(uuid.uuid1())[:20], | |
594 | email=User.DEFAULT_USER_EMAIL, |
|
594 | email=User.DEFAULT_USER_EMAIL, | |
595 | firstname=u'Anonymous', |
|
595 | firstname=u'Anonymous', | |
596 | lastname=u'User', |
|
596 | lastname=u'User', | |
597 | strict_creation_check=False) |
|
597 | strict_creation_check=False) | |
598 | # based on configuration options activate/de-activate this user which |
|
598 | # based on configuration options activate/de-activate this user which | |
599 | # controls anonymous access |

599 | # controls anonymous access | 
600 | if self.cli_args.get('public_access') is False: |
|
600 | if self.cli_args.get('public_access') is False: | |
601 | log.info('Public access disabled') |
|
601 | log.info('Public access disabled') | |
602 | user.active = False |
|
602 | user.active = False | |
603 | Session().add(user) |
|
603 | Session().add(user) | |
604 | Session().commit() |
|
604 | Session().commit() | |
605 |
|
605 | |||
606 | def create_permissions(self): |
|
606 | def create_permissions(self): | |
607 | """ |
|
607 | """ | |
608 | Creates all permissions defined in the system |
|
608 | Creates all permissions defined in the system | |
609 | """ |
|
609 | """ | |
610 | # module.(access|create|change|delete)_[name] |
|
610 | # module.(access|create|change|delete)_[name] | |
611 | # module.(none|read|write|admin) |
|
611 | # module.(none|read|write|admin) | |
612 | log.info('creating permissions') |
|
612 | log.info('creating permissions') | |
613 | PermissionModel(self.sa).create_permissions() |
|
613 | PermissionModel(self.sa).create_permissions() | |
614 |
|
614 | |||
615 | def populate_default_permissions(self): |
|
615 | def populate_default_permissions(self): | |
616 | """ |
|
616 | """ | |
617 | Populate default permissions. It will create only the default |
|
617 | Populate default permissions. It will create only the default | |
618 | permissions that are missing, and not alter already defined ones |
|
618 | permissions that are missing, and not alter already defined ones | |
619 | """ |
|
619 | """ | |
620 | log.info('creating default user permissions') |
|
620 | log.info('creating default user permissions') | |
621 | PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER) |
|
621 | PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER) |
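
A note on the path validation above: the symlink check works by comparing the
normalized path with its fully resolved real path. A minimal standalone sketch
of the same check (the repo_path value is a made-up example, not RhodeCode
configuration):

    import os

    repo_path = '/srv/repos'  # hypothetical storage path

    real_path = os.path.normpath(os.path.realpath(repo_path))
    if real_path != os.path.normpath(repo_path):
        # some component of repo_path is a symlink; the resolved
        # target is what would end up stored in the configuration
        print('symlink detected, would store %s' % real_path)
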
@@ -1,100 +1,100 b'' | |||||
1 | """ |
|
1 | """ | |
2 | Script to migrate repository from sqlalchemy <= 0.4.4 to the new |
|
2 | Script to migrate repository from sqlalchemy <= 0.4.4 to the new | |
3 | repository schema. This shouldn't use any other migrate modules, so |
|
3 | repository schema. This shouldn't use any other migrate modules, so | |
4 | that it can work in any version. |
|
4 | that it can work in any version. | |
5 | """ |
|
5 | """ | |
6 |
|
6 | |||
7 | import os |
|
7 | import os | |
8 | import sys |
|
8 | import sys | |
9 | import logging |
|
9 | import logging | |
10 |
|
10 | |||
11 | log = logging.getLogger(__name__) |
|
11 | log = logging.getLogger(__name__) | |
12 |
|
12 | |||
13 |
|
13 | |||
14 | def usage(): |
|
14 | def usage(): | |
15 | """Gives usage information.""" |
|
15 | """Gives usage information.""" | |
16 | print """Usage: %(prog)s repository-to-migrate |

16 | print("""Usage: %(prog)s repository-to-migrate | 
17 |
|
17 | |||
18 | Upgrade your repository to the new flat format. |
|
18 | Upgrade your repository to the new flat format. | |
19 |
|
19 | |||
20 | NOTE: You should probably make a backup before running this. |
|
20 | NOTE: You should probably make a backup before running this. | |
21 | """ % {'prog': sys.argv[0]} |
|
21 | """ % {'prog': sys.argv[0]}) | |
22 |
|
22 | |||
23 | sys.exit(1) |
|
23 | sys.exit(1) | |
24 |
|
24 | |||
25 |
|
25 | |||
26 | def delete_file(filepath): |
|
26 | def delete_file(filepath): | |
27 | """Deletes a file and prints a message.""" |
|
27 | """Deletes a file and prints a message.""" | |
28 | log.info('Deleting file: %s' % filepath) |
|
28 | log.info('Deleting file: %s' % filepath) | |
29 | os.remove(filepath) |
|
29 | os.remove(filepath) | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | def move_file(src, tgt): |
|
32 | def move_file(src, tgt): | |
33 | """Moves a file and prints a message.""" |
|
33 | """Moves a file and prints a message.""" | |
34 | log.info('Moving file %s to %s' % (src, tgt)) |
|
34 | log.info('Moving file %s to %s' % (src, tgt)) | |
35 | if os.path.exists(tgt): |
|
35 | if os.path.exists(tgt): | |
36 | raise Exception( |
|
36 | raise Exception( | |
37 | 'Cannot move file %s because target %s already exists' % \ |
|
37 | 'Cannot move file %s because target %s already exists' % \ | |
38 | (src, tgt)) |
|
38 | (src, tgt)) | |
39 | os.rename(src, tgt) |
|
39 | os.rename(src, tgt) | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | def delete_directory(dirpath): |
|
42 | def delete_directory(dirpath): | |
43 | """Delete a directory and print a message.""" |
|
43 | """Delete a directory and print a message.""" | |
44 | log.info('Deleting directory: %s' % dirpath) |
|
44 | log.info('Deleting directory: %s' % dirpath) | |
45 | os.rmdir(dirpath) |
|
45 | os.rmdir(dirpath) | |
46 |
|
46 | |||
47 |
|
47 | |||
48 | def migrate_repository(repos): |
|
48 | def migrate_repository(repos): | |
49 | """Does the actual migration to the new repository format.""" |
|
49 | """Does the actual migration to the new repository format.""" | |
50 | log.info('Migrating repository at: %s to new format' % repos) |
|
50 | log.info('Migrating repository at: %s to new format' % repos) | |
51 | versions = '%s/versions' % repos |
|
51 | versions = '%s/versions' % repos | |
52 | dirs = os.listdir(versions) |
|
52 | dirs = os.listdir(versions) | |
53 | # Only use ints in the list. |

53 | # Only use ints in the list. | 
54 | numdirs = [int(dirname) for dirname in dirs if dirname.isdigit()] |
|
54 | numdirs = [int(dirname) for dirname in dirs if dirname.isdigit()] | |
55 | numdirs.sort() # Sort list. |
|
55 | numdirs.sort() # Sort list. | |
56 | for dirname in numdirs: |
|
56 | for dirname in numdirs: | |
57 | origdir = '%s/%s' % (versions, dirname) |
|
57 | origdir = '%s/%s' % (versions, dirname) | |
58 | log.info('Working on directory: %s' % origdir) |
|
58 | log.info('Working on directory: %s' % origdir) | |
59 | files = os.listdir(origdir) |
|
59 | files = os.listdir(origdir) | |
60 | files.sort() |
|
60 | files.sort() | |
61 | for filename in files: |
|
61 | for filename in files: | |
62 | # Delete compiled Python files. |
|
62 | # Delete compiled Python files. | |
63 | if filename.endswith('.pyc') or filename.endswith('.pyo'): |
|
63 | if filename.endswith('.pyc') or filename.endswith('.pyo'): | |
64 | delete_file('%s/%s' % (origdir, filename)) |
|
64 | delete_file('%s/%s' % (origdir, filename)) | |
65 |
|
65 | |||
66 | # Delete empty __init__.py files. |
|
66 | # Delete empty __init__.py files. | |
67 | origfile = '%s/__init__.py' % origdir |
|
67 | origfile = '%s/__init__.py' % origdir | |
68 | if os.path.exists(origfile) and len(open(origfile).read()) == 0: |
|
68 | if os.path.exists(origfile) and len(open(origfile).read()) == 0: | |
69 | delete_file(origfile) |
|
69 | delete_file(origfile) | |
70 |
|
70 | |||
71 | # Move sql upgrade scripts. |
|
71 | # Move sql upgrade scripts. | |
72 | if filename.endswith('.sql'): |
|
72 | if filename.endswith('.sql'): | |
73 | version, dbms, operation = filename.split('.', 3)[0:3] |
|
73 | version, dbms, operation = filename.split('.', 3)[0:3] | |
74 | origfile = '%s/%s' % (origdir, filename) |
|
74 | origfile = '%s/%s' % (origdir, filename) | |
75 | # For instance: 2.postgres.upgrade.sql -> |
|
75 | # For instance: 2.postgres.upgrade.sql -> | |
76 | # 002_postgres_upgrade.sql |
|
76 | # 002_postgres_upgrade.sql | |
77 | tgtfile = '%s/%03d_%s_%s.sql' % ( |
|
77 | tgtfile = '%s/%03d_%s_%s.sql' % ( | |
78 | versions, int(version), dbms, operation) |
|
78 | versions, int(version), dbms, operation) | |
79 | move_file(origfile, tgtfile) |
|
79 | move_file(origfile, tgtfile) | |
80 |
|
80 | |||
81 | # Move Python upgrade script. |
|
81 | # Move Python upgrade script. | |
82 | pyfile = '%s.py' % dirname |
|
82 | pyfile = '%s.py' % dirname | |
83 | pyfilepath = '%s/%s' % (origdir, pyfile) |
|
83 | pyfilepath = '%s/%s' % (origdir, pyfile) | |
84 | if os.path.exists(pyfilepath): |
|
84 | if os.path.exists(pyfilepath): | |
85 | tgtfile = '%s/%03d.py' % (versions, int(dirname)) |
|
85 | tgtfile = '%s/%03d.py' % (versions, int(dirname)) | |
86 | move_file(pyfilepath, tgtfile) |
|
86 | move_file(pyfilepath, tgtfile) | |
87 |
|
87 | |||
88 | # Try to remove directory. Will fail if it's not empty. |
|
88 | # Try to remove directory. Will fail if it's not empty. | |
89 | delete_directory(origdir) |
|
89 | delete_directory(origdir) | |
90 |
|
90 | |||
91 |
|
91 | |||
92 | def main(): |
|
92 | def main(): | |
93 | """Main function to be called when using this script.""" |
|
93 | """Main function to be called when using this script.""" | |
94 | if len(sys.argv) != 2: |
|
94 | if len(sys.argv) != 2: | |
95 | usage() |
|
95 | usage() | |
96 | migrate_repository(sys.argv[1]) |
|
96 | migrate_repository(sys.argv[1]) | |
97 |
|
97 | |||
98 |
|
98 | |||
99 | if __name__ == '__main__': |
|
99 | if __name__ == '__main__': | |
100 | main() |
|
100 | main() |
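
The rename rule applied to the SQL upgrade scripts above can be checked in
isolation; a small sketch of the same transformation with a sample filename:

    filename = '2.postgres.upgrade.sql'
    version, dbms, operation = filename.split('.', 3)[0:3]
    # zero-pad the version to three digits and join with underscores
    print('%03d_%s_%s.sql' % (int(version), dbms, operation))
    # prints: 002_postgres_upgrade.sql
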
@@ -1,79 +1,79 b'' | |||||
1 | import logging |
|
1 | import logging | |
2 | import datetime |
|
2 | import datetime | |
3 |
|
3 | |||
4 | from sqlalchemy import * |
|
4 | from sqlalchemy import * | |
5 | from sqlalchemy.exc import DatabaseError |
|
5 | from sqlalchemy.exc import DatabaseError | |
6 | from sqlalchemy.orm import relation, backref, class_mapper |
|
6 | from sqlalchemy.orm import relation, backref, class_mapper | |
7 | from sqlalchemy.orm.session import Session |
|
7 | from sqlalchemy.orm.session import Session | |
8 |
|
8 | |||
9 | from rhodecode.lib.dbmigrate.migrate import * |
|
9 | from rhodecode.lib.dbmigrate.migrate import * | |
10 | from rhodecode.lib.dbmigrate.migrate.changeset import * |
|
10 | from rhodecode.lib.dbmigrate.migrate.changeset import * | |
11 |
|
11 | |||
12 | from rhodecode.model.meta import Base |
|
12 | from rhodecode.model.meta import Base | |
13 |
|
13 | |||
14 | log = logging.getLogger(__name__) |
|
14 | log = logging.getLogger(__name__) | |
15 |
|
15 | |||
16 |
|
16 | |||
17 | def upgrade(migrate_engine): |
|
17 | def upgrade(migrate_engine): | |
18 | """ Upgrade operations go here. |
|
18 | """ Upgrade operations go here. | |
19 | Don't create your own engine; bind migrate_engine to your metadata |
|
19 | Don't create your own engine; bind migrate_engine to your metadata | |
20 | """ |
|
20 | """ | |
21 |
|
21 | |||
22 | #========================================================================== |
|
22 | #========================================================================== | |
23 | # Change unique constraints of table `repo_to_perm` |
|
23 | # Change unique constraints of table `repo_to_perm` | |
24 | #========================================================================== |
|
24 | #========================================================================== | |
25 | from rhodecode.lib.dbmigrate.schema.db_1_3_0 import UserRepoToPerm |
|
25 | from rhodecode.lib.dbmigrate.schema.db_1_3_0 import UserRepoToPerm | |
26 | tbl = UserRepoToPerm().__table__ |
|
26 | tbl = UserRepoToPerm().__table__ | |
27 | new_cons = UniqueConstraint('user_id', 'repository_id', 'permission_id', table=tbl) |
|
27 | new_cons = UniqueConstraint('user_id', 'repository_id', 'permission_id', table=tbl) | |
28 | new_cons.create() |
|
28 | new_cons.create() | |
29 | old_cons = None |
|
29 | old_cons = None | |
30 | if migrate_engine.name in ['mysql']: |
|
30 | if migrate_engine.name in ['mysql']: | |
31 | old_cons = UniqueConstraint('user_id', 'repository_id', table=tbl, name="user_id") |
|
31 | old_cons = UniqueConstraint('user_id', 'repository_id', table=tbl, name="user_id") | |
32 | elif migrate_engine.name in ['postgresql']: |
|
32 | elif migrate_engine.name in ['postgresql']: | |
33 | old_cons = UniqueConstraint('user_id', 'repository_id', table=tbl) |
|
33 | old_cons = UniqueConstraint('user_id', 'repository_id', table=tbl) | |
34 | else: |
|
34 | else: | |
35 | # sqlite doesn't support dropping constraints... |
|
35 | # sqlite doesn't support dropping constraints... | |
36 | print """Please manually drop UniqueConstraint('user_id', 'repository_id')""" |

36 | print("""Please manually drop UniqueConstraint('user_id', 'repository_id')""") | 
37 |
|
37 | |||
38 | if old_cons: |
|
38 | if old_cons: | |
39 | try: |
|
39 | try: | |
40 | old_cons.drop() |
|
40 | old_cons.drop() | |
41 | except Exception as e: |
|
41 | except Exception as e: | |
42 | # we don't really care if this fails... better to let the migration pass than |

42 | # we don't really care if this fails... better to let the migration pass than | 
43 | # leave this in an intermediate state |

43 | # leave this in an intermediate state | 
44 | print 'Failed to remove Unique for user_id, repository_id reason %s' % e |

44 | print('Failed to remove Unique for user_id, repository_id reason %s' % e) | 
45 |
|
45 | |||
46 |
|
46 | |||
47 | #========================================================================== |
|
47 | #========================================================================== | |
48 | # fix uniques of table `user_repo_group_to_perm` |
|
48 | # fix uniques of table `user_repo_group_to_perm` | |
49 | #========================================================================== |
|
49 | #========================================================================== | |
50 | from rhodecode.lib.dbmigrate.schema.db_1_3_0 import UserRepoGroupToPerm |
|
50 | from rhodecode.lib.dbmigrate.schema.db_1_3_0 import UserRepoGroupToPerm | |
51 | tbl = UserRepoGroupToPerm().__table__ |
|
51 | tbl = UserRepoGroupToPerm().__table__ | |
52 | new_cons = UniqueConstraint('group_id', 'permission_id', 'user_id', table=tbl) |
|
52 | new_cons = UniqueConstraint('group_id', 'permission_id', 'user_id', table=tbl) | |
53 | new_cons.create() |
|
53 | new_cons.create() | |
54 | old_cons = None |
|
54 | old_cons = None | |
55 |
|
55 | |||
56 | # fix uniqueConstraints |
|
56 | # fix uniqueConstraints | |
57 | if migrate_engine.name in ['mysql']: |
|
57 | if migrate_engine.name in ['mysql']: | |
58 | # mysql is giving trouble here... |

58 | # mysql is giving trouble here... | 
59 | old_cons = UniqueConstraint('group_id', 'permission_id', table=tbl, name="group_id") |
|
59 | old_cons = UniqueConstraint('group_id', 'permission_id', table=tbl, name="group_id") | |
60 | elif migrate_engine.name in ['postgresql']: |
|
60 | elif migrate_engine.name in ['postgresql']: | |
61 | old_cons = UniqueConstraint('group_id', 'permission_id', table=tbl, name='group_to_perm_group_id_permission_id_key') |
|
61 | old_cons = UniqueConstraint('group_id', 'permission_id', table=tbl, name='group_to_perm_group_id_permission_id_key') | |
62 | else: |
|
62 | else: | |
63 | # sqlite doesn't support dropping constraints... |
|
63 | # sqlite doesn't support dropping constraints... | |
64 | print """Please manually drop UniqueConstraint('group_id', 'permission_id')""" |

64 | print("""Please manually drop UniqueConstraint('group_id', 'permission_id')""") | 
65 |
|
65 | |||
66 | if old_cons: |
|
66 | if old_cons: | |
67 | try: |
|
67 | try: | |
68 | old_cons.drop() |
|
68 | old_cons.drop() | |
69 | except Exception as e: |
|
69 | except Exception as e: | |
70 | # we don't really care if this fails... better to let the migration pass than |

70 | # we don't really care if this fails... better to let the migration pass than | 
71 | # leave this in an intermediate state |

71 | # leave this in an intermediate state | 
72 | print 'Failed to remove Unique for group_id, permission_id reason %s' % e |

72 | print('Failed to remove Unique for group_id, permission_id reason %s' % e) | 
73 |
|
73 | |||
74 | return |
|
74 | return | |
75 |
|
75 | |||
76 |
|
76 | |||
77 | def downgrade(migrate_engine): |
|
77 | def downgrade(migrate_engine): | |
78 | meta = MetaData() |
|
78 | meta = MetaData() | |
79 | meta.bind = migrate_engine |
|
79 | meta.bind = migrate_engine |
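
The migration above swaps unique constraints in a deliberate order: the new
constraint is created first and the old one dropped second, so the table never
loses uniqueness mid-migration, and a failed drop is tolerated rather than
aborting. A condensed sketch of that pattern with sqlalchemy-migrate's
changeset API (assuming tbl is a reflected Table and the constraint name
matches what the backend generated):

    from migrate.changeset.constraint import UniqueConstraint

    new_cons = UniqueConstraint(
        'user_id', 'repository_id', 'permission_id', table=tbl)
    new_cons.create()
    try:
        # MySQL names the implicit constraint after its first column
        UniqueConstraint(
            'user_id', 'repository_id', table=tbl, name='user_id').drop()
    except Exception as e:
        # better to finish the migration than abort on a missing constraint
        print('could not drop old constraint: %s' % e)
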
@@ -1,135 +1,135 b'' | |||||
1 | import logging |
|
1 | import logging | |
2 | import datetime |
|
2 | import datetime | |
3 |
|
3 | |||
4 | from sqlalchemy import * |
|
4 | from sqlalchemy import * | |
5 | from sqlalchemy.exc import DatabaseError |
|
5 | from sqlalchemy.exc import DatabaseError | |
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload |
|
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload | |
7 | from sqlalchemy.orm.session import Session |
|
7 | from sqlalchemy.orm.session import Session | |
8 | from sqlalchemy.ext.declarative import declarative_base |
|
8 | from sqlalchemy.ext.declarative import declarative_base | |
9 |
|
9 | |||
10 | from rhodecode.lib.dbmigrate.migrate import * |
|
10 | from rhodecode.lib.dbmigrate.migrate import * | |
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * |
|
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * | |
12 |
|
12 | |||
13 | from rhodecode.model.meta import Base |
|
13 | from rhodecode.model.meta import Base | |
14 | from rhodecode.model import meta |
|
14 | from rhodecode.model import meta | |
15 | from rhodecode.lib.dbmigrate.versions import _reset_base |
|
15 | from rhodecode.lib.dbmigrate.versions import _reset_base | |
16 |
|
16 | |||
17 | log = logging.getLogger(__name__) |
|
17 | log = logging.getLogger(__name__) | |
18 |
|
18 | |||
19 |
|
19 | |||
20 | def upgrade(migrate_engine): |
|
20 | def upgrade(migrate_engine): | |
21 | """ |
|
21 | """ | |
22 | Upgrade operations go here. |
|
22 | Upgrade operations go here. | |
23 | Don't create your own engine; bind migrate_engine to your metadata |
|
23 | Don't create your own engine; bind migrate_engine to your metadata | |
24 | """ |
|
24 | """ | |
25 | _reset_base(migrate_engine) |
|
25 | _reset_base(migrate_engine) | |
26 | from rhodecode.lib.dbmigrate.schema import db_1_5_0 |
|
26 | from rhodecode.lib.dbmigrate.schema import db_1_5_0 | |
27 | #========================================================================== |
|
27 | #========================================================================== | |
28 | # USER LOGS |
|
28 | # USER LOGS | |
29 | #========================================================================== |
|
29 | #========================================================================== | |
30 |
|
30 | |||
31 | tbl = db_1_5_0.UserLog.__table__ |
|
31 | tbl = db_1_5_0.UserLog.__table__ | |
32 | username = Column("username", String(255, convert_unicode=False), |
|
32 | username = Column("username", String(255, convert_unicode=False), | |
33 | nullable=True, unique=None, default=None) |
|
33 | nullable=True, unique=None, default=None) | |
34 | # create username column |
|
34 | # create username column | |
35 | username.create(table=tbl) |
|
35 | username.create(table=tbl) | |
36 |
|
36 | |||
37 | _Session = meta.Session() |
|
37 | _Session = meta.Session() | |
38 | ## after adding that column fix all usernames |
|
38 | ## after adding that column fix all usernames | |
39 | users_log = _Session.query(db_1_5_0.UserLog)\ |
|
39 | users_log = _Session.query(db_1_5_0.UserLog)\ | |
40 | .options(joinedload(db_1_5_0.UserLog.user))\ |
|
40 | .options(joinedload(db_1_5_0.UserLog.user))\ | |
41 | .options(joinedload(db_1_5_0.UserLog.repository)).all() |
|
41 | .options(joinedload(db_1_5_0.UserLog.repository)).all() | |
42 |
|
42 | |||
43 | for entry in users_log: |
|
43 | for entry in users_log: | |
44 | entry.username = entry.user.username |
|
44 | entry.username = entry.user.username | |
45 | _Session.add(entry) |
|
45 | _Session.add(entry) | |
46 | _Session.commit() |
|
46 | _Session.commit() | |
47 |
|
47 | |||
48 | #alter username to not null |
|
48 | #alter username to not null | |
49 | tbl_name = db_1_5_0.UserLog.__tablename__ |
|
49 | tbl_name = db_1_5_0.UserLog.__tablename__ | |
50 | tbl = Table(tbl_name, |
|
50 | tbl = Table(tbl_name, | |
51 | MetaData(bind=migrate_engine), autoload=True, |
|
51 | MetaData(bind=migrate_engine), autoload=True, | |
52 | autoload_with=migrate_engine) |
|
52 | autoload_with=migrate_engine) | |
53 | col = tbl.columns.username |
|
53 | col = tbl.columns.username | |
54 |
|
54 | |||
55 | # remove nullability from the username field |

55 | # remove nullability from the username field | 
56 | col.alter(nullable=False) |
|
56 | col.alter(nullable=False) | |
57 |
|
57 | |||
58 | # issue fixups |
|
58 | # issue fixups | |
59 | fixups(db_1_5_0, meta.Session) |
|
59 | fixups(db_1_5_0, meta.Session) | |
60 |
|
60 | |||
61 |
|
61 | |||
62 | def downgrade(migrate_engine): |
|
62 | def downgrade(migrate_engine): | |
63 | meta = MetaData() |
|
63 | meta = MetaData() | |
64 | meta.bind = migrate_engine |
|
64 | meta.bind = migrate_engine | |
65 |
|
65 | |||
66 |
|
66 | |||
67 | def get_by_key(cls, key): |
|
67 | def get_by_key(cls, key): | |
68 | return cls.query().filter(cls.permission_name == key).scalar() |
|
68 | return cls.query().filter(cls.permission_name == key).scalar() | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | def get_by_name(cls, key): |
|
71 | def get_by_name(cls, key): | |
72 | return cls.query().filter(cls.app_settings_name == key).scalar() |
|
72 | return cls.query().filter(cls.app_settings_name == key).scalar() | |
73 |
|
73 | |||
74 |
|
74 | |||
75 | def fixups(models, _SESSION): |
|
75 | def fixups(models, _SESSION): | |
76 | # ** create default permissions ** # |
|
76 | # ** create default permissions ** # | |
77 | #===================================== |
|
77 | #===================================== | |
78 | for p in models.Permission.PERMS: |
|
78 | for p in models.Permission.PERMS: | |
79 | if not get_by_key(models.Permission, p[0]): |
|
79 | if not get_by_key(models.Permission, p[0]): | |
80 | new_perm = models.Permission() |
|
80 | new_perm = models.Permission() | |
81 | new_perm.permission_name = p[0] |
|
81 | new_perm.permission_name = p[0] | |
82 | new_perm.permission_longname = p[0] #translation err with p[1] |
|
82 | new_perm.permission_longname = p[0] #translation err with p[1] | |
83 | print 'Creating new permission %s' % p[0] |

83 | print('Creating new permission %s' % p[0]) | 
84 | _SESSION().add(new_perm) |
|
84 | _SESSION().add(new_perm) | |
85 |
|
85 | |||
86 | _SESSION().commit() |
|
86 | _SESSION().commit() | |
87 |
|
87 | |||
88 | # ** populate default permissions ** # |
|
88 | # ** populate default permissions ** # | |
89 | #===================================== |
|
89 | #===================================== | |
90 |
|
90 | |||
91 | user = models.User.query().filter(models.User.username == 'default').scalar() |
|
91 | user = models.User.query().filter(models.User.username == 'default').scalar() | |
92 |
|
92 | |||
93 | def _make_perm(perm): |
|
93 | def _make_perm(perm): | |
94 | new_perm = models.UserToPerm() |
|
94 | new_perm = models.UserToPerm() | |
95 | new_perm.user = user |
|
95 | new_perm.user = user | |
96 | new_perm.permission = get_by_key(models.Permission, perm) |
|
96 | new_perm.permission = get_by_key(models.Permission, perm) | |
97 | return new_perm |
|
97 | return new_perm | |
98 |
|
98 | |||
99 | def _get_group(perm_name): |
|
99 | def _get_group(perm_name): | |
100 | return '.'.join(perm_name.split('.')[:1]) |
|
100 | return '.'.join(perm_name.split('.')[:1]) | |
101 |
|
101 | |||
102 | perms = models.UserToPerm.query().filter(models.UserToPerm.user == user).all() |
|
102 | perms = models.UserToPerm.query().filter(models.UserToPerm.user == user).all() | |
103 | defined_perms_groups = map( |
|
103 | defined_perms_groups = map( | |
104 | _get_group, (x.permission.permission_name for x in perms)) |
|
104 | _get_group, (x.permission.permission_name for x in perms)) | |
105 | log.debug('GOT ALREADY DEFINED:%s' % perms) |
|
105 | log.debug('GOT ALREADY DEFINED:%s' % perms) | |
106 | DEFAULT_PERMS = models.Permission.DEFAULT_USER_PERMISSIONS |
|
106 | DEFAULT_PERMS = models.Permission.DEFAULT_USER_PERMISSIONS | |
107 |
|
107 | |||
108 | # for every default permission that needs to be created, we check if |
|
108 | # for every default permission that needs to be created, we check if | |
109 | # its group is already defined; if it's not, we create the default perm |

109 | # its group is already defined; if it's not, we create the default perm | 
110 | for perm_name in DEFAULT_PERMS: |
|
110 | for perm_name in DEFAULT_PERMS: | |
111 | gr = _get_group(perm_name) |
|
111 | gr = _get_group(perm_name) | |
112 | if gr not in defined_perms_groups: |
|
112 | if gr not in defined_perms_groups: | |
113 | log.debug('GR:%s not found, creating permission %s' |
|
113 | log.debug('GR:%s not found, creating permission %s' | |
114 | % (gr, perm_name)) |
|
114 | % (gr, perm_name)) | |
115 | new_perm = _make_perm(perm_name) |
|
115 | new_perm = _make_perm(perm_name) | |
116 | _SESSION().add(new_perm) |
|
116 | _SESSION().add(new_perm) | |
117 | _SESSION().commit() |
|
117 | _SESSION().commit() | |
118 |
|
118 | |||
119 | # ** create default options ** # |
|
119 | # ** create default options ** # | |
120 | #=============================== |
|
120 | #=============================== | |
121 | skip_existing = True |
|
121 | skip_existing = True | |
122 | for k, v in [ |
|
122 | for k, v in [ | |
123 | ('default_repo_enable_locking', False), |
|
123 | ('default_repo_enable_locking', False), | |
124 | ('default_repo_enable_downloads', False), |
|
124 | ('default_repo_enable_downloads', False), | |
125 | ('default_repo_enable_statistics', False), |
|
125 | ('default_repo_enable_statistics', False), | |
126 | ('default_repo_private', False), |
|
126 | ('default_repo_private', False), | |
127 | ('default_repo_type', 'hg')]: |
|
127 | ('default_repo_type', 'hg')]: | |
128 |
|
128 | |||
129 | if skip_existing and get_by_name(models.RhodeCodeSetting, k) is not None: |
|
129 | if skip_existing and get_by_name(models.RhodeCodeSetting, k) is not None: | |
130 | log.debug('Skipping option %s' % k) |
|
130 | log.debug('Skipping option %s' % k) | |
131 | continue |
|
131 | continue | |
132 | setting = models.RhodeCodeSetting(k, v) |
|
132 | setting = models.RhodeCodeSetting(k, v) | |
133 | _SESSION().add(setting) |
|
133 | _SESSION().add(setting) | |
134 |
|
134 | |||
135 | _SESSION().commit() |
|
135 | _SESSION().commit() |
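
The _get_group helper above reduces a permission name to its first dotted
segment, which is what the duplicate check compares. For example (the
permission names here are only illustrative):

    def _get_group(perm_name):
        return '.'.join(perm_name.split('.')[:1])

    print(_get_group('repository.read'))       # prints: repository
    print(_get_group('hg.create.repository'))  # prints: hg
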
@@ -1,49 +1,49 b'' | |||||
1 | import logging |
|
1 | import logging | |
2 | import datetime |
|
2 | import datetime | |
3 |
|
3 | |||
4 | from sqlalchemy import * |
|
4 | from sqlalchemy import * | |
5 | from sqlalchemy.exc import DatabaseError |
|
5 | from sqlalchemy.exc import DatabaseError | |
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload |
|
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload | |
7 | from sqlalchemy.orm.session import Session |
|
7 | from sqlalchemy.orm.session import Session | |
8 | from sqlalchemy.ext.declarative import declarative_base |
|
8 | from sqlalchemy.ext.declarative import declarative_base | |
9 |
|
9 | |||
10 | from rhodecode.lib.dbmigrate.migrate import * |
|
10 | from rhodecode.lib.dbmigrate.migrate import * | |
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * |
|
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * | |
12 |
|
12 | |||
13 | from rhodecode.model.meta import Base |
|
13 | from rhodecode.model.meta import Base | |
14 | from rhodecode.model import meta |
|
14 | from rhodecode.model import meta | |
15 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify |
|
15 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify | |
16 |
|
16 | |||
17 | log = logging.getLogger(__name__) |
|
17 | log = logging.getLogger(__name__) | |
18 |
|
18 | |||
19 |
|
19 | |||
20 | def upgrade(migrate_engine): |
|
20 | def upgrade(migrate_engine): | |
21 | """ |
|
21 | """ | |
22 | Upgrade operations go here. |
|
22 | Upgrade operations go here. | |
23 | Don't create your own engine; bind migrate_engine to your metadata |
|
23 | Don't create your own engine; bind migrate_engine to your metadata | |
24 | """ |
|
24 | """ | |
25 | _reset_base(migrate_engine) |
|
25 | _reset_base(migrate_engine) | |
26 | from rhodecode.lib.dbmigrate.schema import db_1_6_0 |
|
26 | from rhodecode.lib.dbmigrate.schema import db_1_6_0 | |
27 |
|
27 | |||
28 | #========================================================================== |
|
28 | #========================================================================== | |
29 | # USER LOGS |
|
29 | # USER LOGS | |
30 | #========================================================================== |
|
30 | #========================================================================== | |
31 | tbl = db_1_6_0.RepositoryField.__table__ |
|
31 | tbl = db_1_6_0.RepositoryField.__table__ | |
32 | tbl.create() |
|
32 | tbl.create() | |
33 |
|
33 | |||
34 | # issue fixups |
|
34 | # issue fixups | |
35 | fixups(db_1_6_0, meta.Session) |
|
35 | fixups(db_1_6_0, meta.Session) | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | def downgrade(migrate_engine): |
|
38 | def downgrade(migrate_engine): | |
39 | meta = MetaData() |
|
39 | meta = MetaData() | |
40 | meta.bind = migrate_engine |
|
40 | meta.bind = migrate_engine | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | def fixups(models, _SESSION): |
|
43 | def fixups(models, _SESSION): | |
44 | notify('Upgrading repository caches') |

44 | notify('Upgrading repository caches') | 
45 | repositories = models.Repository.getAll() |
|
45 | repositories = models.Repository.getAll() | |
46 | for repo in repositories: |
|
46 | for repo in repositories: | |
47 | print repo |

47 | print(repo) | 
48 | repo.update_commit_cache() |
|
48 | repo.update_commit_cache() | |
49 | _SESSION().commit() |
|
49 | _SESSION().commit() |
@@ -1,85 +1,85 b'' | |||||
1 | import os |
|
1 | import os | |
2 | import logging |
|
2 | import logging | |
3 | import datetime |
|
3 | import datetime | |
4 |
|
4 | |||
5 | from sqlalchemy import * |
|
5 | from sqlalchemy import * | |
6 | from sqlalchemy.exc import DatabaseError |
|
6 | from sqlalchemy.exc import DatabaseError | |
7 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload |
|
7 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload | |
8 | from sqlalchemy.orm.session import Session |
|
8 | from sqlalchemy.orm.session import Session | |
9 | from sqlalchemy.ext.declarative import declarative_base |
|
9 | from sqlalchemy.ext.declarative import declarative_base | |
10 |
|
10 | |||
11 | from rhodecode.lib.dbmigrate.migrate import * |
|
11 | from rhodecode.lib.dbmigrate.migrate import * | |
12 | from rhodecode.lib.dbmigrate.migrate.changeset import * |
|
12 | from rhodecode.lib.dbmigrate.migrate.changeset import * | |
13 | from rhodecode.lib.utils2 import str2bool |
|
13 | from rhodecode.lib.utils2 import str2bool | |
14 |
|
14 | |||
15 | from rhodecode.model.meta import Base |
|
15 | from rhodecode.model.meta import Base | |
16 | from rhodecode.model import meta |
|
16 | from rhodecode.model import meta | |
17 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify |
|
17 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify | |
18 |
|
18 | |||
19 | log = logging.getLogger(__name__) |
|
19 | log = logging.getLogger(__name__) | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | def get_by_key(cls, key): |
|
22 | def get_by_key(cls, key): | |
23 | return cls.query().filter(cls.ui_key == key).scalar() |
|
23 | return cls.query().filter(cls.ui_key == key).scalar() | |
24 |
|
24 | |||
25 |
|
25 | |||
26 | def get_repos_location(cls): |
|
26 | def get_repos_location(cls): | |
27 | return get_by_key(cls, '/').ui_value |
|
27 | return get_by_key(cls, '/').ui_value | |
28 |
|
28 | |||
29 |
|
29 | |||
30 | def upgrade(migrate_engine): |
|
30 | def upgrade(migrate_engine): | |
31 | """ |
|
31 | """ | |
32 | Upgrade operations go here. |
|
32 | Upgrade operations go here. | |
33 | Don't create your own engine; bind migrate_engine to your metadata |
|
33 | Don't create your own engine; bind migrate_engine to your metadata | |
34 | """ |
|
34 | """ | |
35 | _reset_base(migrate_engine) |
|
35 | _reset_base(migrate_engine) | |
36 | from rhodecode.lib.dbmigrate.schema import db_2_0_1 |
|
36 | from rhodecode.lib.dbmigrate.schema import db_2_0_1 | |
37 | tbl = db_2_0_1.RepoGroup.__table__ |
|
37 | tbl = db_2_0_1.RepoGroup.__table__ | |
38 |
|
38 | |||
39 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, |
|
39 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, | |
40 | default=datetime.datetime.now) |
|
40 | default=datetime.datetime.now) | |
41 | created_on.create(table=tbl) |
|
41 | created_on.create(table=tbl) | |
42 |
|
42 | |||
43 | #fix null values on certain columns when upgrading from older releases |
|
43 | #fix null values on certain columns when upgrading from older releases | |
44 | tbl = db_2_0_1.UserLog.__table__ |
|
44 | tbl = db_2_0_1.UserLog.__table__ | |
45 | col = tbl.columns.user_id |
|
45 | col = tbl.columns.user_id | |
46 | col.alter(nullable=True) |
|
46 | col.alter(nullable=True) | |
47 |
|
47 | |||
48 | tbl = db_2_0_1.UserFollowing.__table__ |
|
48 | tbl = db_2_0_1.UserFollowing.__table__ | |
49 | col = tbl.columns.follows_repository_id |
|
49 | col = tbl.columns.follows_repository_id | |
50 | col.alter(nullable=True) |
|
50 | col.alter(nullable=True) | |
51 |
|
51 | |||
52 | tbl = db_2_0_1.UserFollowing.__table__ |
|
52 | tbl = db_2_0_1.UserFollowing.__table__ | |
53 | col = tbl.columns.follows_user_id |
|
53 | col = tbl.columns.follows_user_id | |
54 | col.alter(nullable=True) |
|
54 | col.alter(nullable=True) | |
55 |
|
55 | |||
56 | # issue fixups |
|
56 | # issue fixups | |
57 | fixups(db_2_0_1, meta.Session) |
|
57 | fixups(db_2_0_1, meta.Session) | |
58 |
|
58 | |||
59 |
|
59 | |||
60 | def downgrade(migrate_engine): |
|
60 | def downgrade(migrate_engine): | |
61 | meta = MetaData() |
|
61 | meta = MetaData() | |
62 | meta.bind = migrate_engine |
|
62 | meta.bind = migrate_engine | |
63 |
|
63 | |||
64 |
|
64 | |||
65 | def fixups(models, _SESSION): |
|
65 | def fixups(models, _SESSION): | |
66 | notify('Fixing default created_on for repo groups') |

66 | notify('Fixing default created_on for repo groups') | 
67 |
|
67 | |||
68 | for gr in models.RepoGroup.get_all(): |
|
68 | for gr in models.RepoGroup.get_all(): | |
69 | gr.created_on = datetime.datetime.now() |
|
69 | gr.created_on = datetime.datetime.now() | |
70 | _SESSION().add(gr) |
|
70 | _SESSION().add(gr) | |
71 | _SESSION().commit() |
|
71 | _SESSION().commit() | |
72 |
|
72 | |||
73 | repo_store_path = get_repos_location(models.RhodeCodeUi) |
|
73 | repo_store_path = get_repos_location(models.RhodeCodeUi) | |
74 | _store = os.path.join(repo_store_path, '.cache', 'largefiles') |
|
74 | _store = os.path.join(repo_store_path, '.cache', 'largefiles') | |
75 | notify('Setting largefiles usercache') |
|
75 | notify('Setting largefiles usercache') | |
76 | print _store |

76 | print(_store) | 
77 |
|
77 | |||
78 | if not models.RhodeCodeUi.query().filter( |
|
78 | if not models.RhodeCodeUi.query().filter( | |
79 | models.RhodeCodeUi.ui_key == 'usercache').scalar(): |
|
79 | models.RhodeCodeUi.ui_key == 'usercache').scalar(): | |
80 | largefiles = models.RhodeCodeUi() |
|
80 | largefiles = models.RhodeCodeUi() | |
81 | largefiles.ui_section = 'largefiles' |
|
81 | largefiles.ui_section = 'largefiles' | |
82 | largefiles.ui_key = 'usercache' |
|
82 | largefiles.ui_key = 'usercache' | |
83 | largefiles.ui_value = _store |
|
83 | largefiles.ui_value = _store | |
84 | _SESSION().add(largefiles) |
|
84 | _SESSION().add(largefiles) | |
85 | _SESSION().commit() |
|
85 | _SESSION().commit() |
@@ -1,70 +1,70 b'' | |||||
1 | import logging |
|
1 | import logging | |
2 | import datetime |
|
2 | import datetime | |
3 |
|
3 | |||
4 | from sqlalchemy import * |
|
4 | from sqlalchemy import * | |
5 | from sqlalchemy.exc import DatabaseError |
|
5 | from sqlalchemy.exc import DatabaseError | |
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload |
|
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload | |
7 | from sqlalchemy.orm.session import Session |
|
7 | from sqlalchemy.orm.session import Session | |
8 | from sqlalchemy.ext.declarative import declarative_base |
|
8 | from sqlalchemy.ext.declarative import declarative_base | |
9 |
|
9 | |||
10 | from rhodecode.lib.dbmigrate.migrate import * |
|
10 | from rhodecode.lib.dbmigrate.migrate import * | |
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * |
|
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * | |
12 | from rhodecode.lib.utils2 import str2bool |
|
12 | from rhodecode.lib.utils2 import str2bool | |
13 |
|
13 | |||
14 | from rhodecode.model.meta import Base |
|
14 | from rhodecode.model.meta import Base | |
15 | from rhodecode.model import meta |
|
15 | from rhodecode.model import meta | |
16 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify |
|
16 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify | |
17 |
|
17 | |||
18 | log = logging.getLogger(__name__) |
|
18 | log = logging.getLogger(__name__) | |
19 |
|
19 | |||
20 |
|
20 | |||
21 | def upgrade(migrate_engine): |
|
21 | def upgrade(migrate_engine): | |
22 | """ |
|
22 | """ | |
23 | Upgrade operations go here. |
|
23 | Upgrade operations go here. | |
24 | Don't create your own engine; bind migrate_engine to your metadata |
|
24 | Don't create your own engine; bind migrate_engine to your metadata | |
25 | """ |
|
25 | """ | |
26 | _reset_base(migrate_engine) |
|
26 | _reset_base(migrate_engine) | |
27 | from rhodecode.lib.dbmigrate.schema import db_2_0_2 |
|
27 | from rhodecode.lib.dbmigrate.schema import db_2_0_2 | |
28 |
|
28 | |||
29 | # issue fixups |
|
29 | # issue fixups | |
30 | fixups(db_2_0_2, meta.Session) |
|
30 | fixups(db_2_0_2, meta.Session) | |
31 |
|
31 | |||
32 |
|
32 | |||
33 | def downgrade(migrate_engine): |
|
33 | def downgrade(migrate_engine): | |
34 | meta = MetaData() |
|
34 | meta = MetaData() | |
35 | meta.bind = migrate_engine |
|
35 | meta.bind = migrate_engine | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | def fixups(models, _SESSION): |
|
38 | def fixups(models, _SESSION): | |
39 | notify('fixing new schema for landing_rev') |
|
39 | notify('fixing new schema for landing_rev') | |
40 |
|
40 | |||
41 | for repo in models.Repository.get_all(): |
|
41 | for repo in models.Repository.get_all(): | |
42 | print u'repo %s old landing rev is: %s' % (repo, repo.landing_rev) |

42 | print(u'repo %s old landing rev is: %s' % (repo, repo.landing_rev)) | 
43 | _rev = repo.landing_rev[1] |
|
43 | _rev = repo.landing_rev[1] | |
44 | _rev_type = 'rev' # default |
|
44 | _rev_type = 'rev' # default | |
45 |
|
45 | |||
46 | if _rev in ['default', 'master']: |
|
46 | if _rev in ['default', 'master']: | |
47 | _rev_type = 'branch' |
|
47 | _rev_type = 'branch' | |
48 | elif _rev in ['tip']: |
|
48 | elif _rev in ['tip']: | |
49 | _rev_type = 'rev' |
|
49 | _rev_type = 'rev' | |
50 | else: |
|
50 | else: | |
51 | try: |
|
51 | try: | |
52 | scm = repo.scm_instance |
|
52 | scm = repo.scm_instance | |
53 | if scm: |
|
53 | if scm: | |
54 | known_branches = scm.branches.keys() |
|
54 | known_branches = scm.branches.keys() | |
55 | known_bookmarks = scm.bookmarks.keys() |
|
55 | known_bookmarks = scm.bookmarks.keys() | |
56 | if _rev in known_branches: |
|
56 | if _rev in known_branches: | |
57 | _rev_type = 'branch' |
|
57 | _rev_type = 'branch' | |
58 | elif _rev in known_bookmarks: |
|
58 | elif _rev in known_bookmarks: | |
59 | _rev_type = 'book' |
|
59 | _rev_type = 'book' | |
60 | except Exception as e: |
|
60 | except Exception as e: | |
61 | print e |

61 | print(e) | 
62 | print 'continue...' |

62 | print('continue...') | 
63 | #we don't want any error to break the process |
|
63 | #we don't want any error to break the process | |
64 | pass |
|
64 | pass | |
65 |
|
65 | |||
66 | _new_landing_rev = '%s:%s' % (_rev_type, _rev) |
|
66 | _new_landing_rev = '%s:%s' % (_rev_type, _rev) | |
67 | print u'setting to %s' % _new_landing_rev |

67 | print(u'setting to %s' % _new_landing_rev) | 
68 | repo.landing_rev = _new_landing_rev |
|
68 | repo.landing_rev = _new_landing_rev | |
69 | _SESSION().add(repo) |
|
69 | _SESSION().add(repo) | |
70 | _SESSION().commit() |
|
70 | _SESSION().commit() |
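
The fixup above rewrites bare landing revisions into type:value pairs. A
compact restatement of its decision order (a sketch only; branches and
bookmarks stand in for the repository's scm_instance lookups):

    def classify_landing_rev(rev, branches=(), bookmarks=()):
        # named defaults first, then an explicit tip check, then the
        # repository's own branches/bookmarks, falling back to a plain rev
        if rev in ('default', 'master'):
            rev_type = 'branch'
        elif rev == 'tip':
            rev_type = 'rev'
        elif rev in branches:
            rev_type = 'branch'
        elif rev in bookmarks:
            rev_type = 'book'
        else:
            rev_type = 'rev'
        return '%s:%s' % (rev_type, rev)

    print(classify_landing_rev('tip'))                           # rev:tip
    print(classify_landing_rev('stable', branches=('stable',)))  # branch:stable
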
@@ -1,65 +1,65 b'' | |||||
1 | import logging |
|
1 | import logging | |
2 | import datetime |
|
2 | import datetime | |
3 |
|
3 | |||
4 | from sqlalchemy import * |
|
4 | from sqlalchemy import * | |
5 | from sqlalchemy.exc import DatabaseError |
|
5 | from sqlalchemy.exc import DatabaseError | |
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload |
|
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload | |
7 | from sqlalchemy.orm.session import Session |
|
7 | from sqlalchemy.orm.session import Session | |
8 | from sqlalchemy.ext.declarative import declarative_base |
|
8 | from sqlalchemy.ext.declarative import declarative_base | |
9 |
|
9 | |||
10 | from rhodecode.lib.dbmigrate.migrate import * |
|
10 | from rhodecode.lib.dbmigrate.migrate import * | |
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * |
|
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * | |
12 | from rhodecode.lib.utils2 import str2bool |
|
12 | from rhodecode.lib.utils2 import str2bool | |
13 |
|
13 | |||
14 | from rhodecode.model.meta import Base |
|
14 | from rhodecode.model.meta import Base | |
15 | from rhodecode.model import meta |
|
15 | from rhodecode.model import meta | |
16 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify |
|
16 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify | |
17 |
|
17 | |||
18 | log = logging.getLogger(__name__) |
|
18 | log = logging.getLogger(__name__) | |
19 |
|
19 | |||
20 |
|
20 | |||
21 | def get_by_key(cls, key): |
|
21 | def get_by_key(cls, key): | |
22 | return cls.query().filter(cls.permission_name == key).scalar() |
|
22 | return cls.query().filter(cls.permission_name == key).scalar() | |
23 |
|
23 | |||
24 |
|
24 | |||
25 | def upgrade(migrate_engine): |
|
25 | def upgrade(migrate_engine): | |
26 | """ |
|
26 | """ | |
27 | Upgrade operations go here. |
|
27 | Upgrade operations go here. | |
28 | Don't create your own engine; bind migrate_engine to your metadata |
|
28 | Don't create your own engine; bind migrate_engine to your metadata | |
29 | """ |
|
29 | """ | |
30 | _reset_base(migrate_engine) |
|
30 | _reset_base(migrate_engine) | |
31 | from rhodecode.lib.dbmigrate.schema import db_2_2_0 |
|
31 | from rhodecode.lib.dbmigrate.schema import db_2_2_0 | |
32 |
|
32 | |||
33 | # issue fixups |
|
33 | # issue fixups | |
34 | fixups(db_2_2_0, meta.Session) |
|
34 | fixups(db_2_2_0, meta.Session) | |
35 |
|
35 | |||
36 |
|
36 | |||
37 | def downgrade(migrate_engine): |
|
37 | def downgrade(migrate_engine): | |
38 | meta = MetaData() |
|
38 | meta = MetaData() | |
39 | meta.bind = migrate_engine |
|
39 | meta.bind = migrate_engine | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | def fixups(models, _SESSION): |
|
42 | def fixups(models, _SESSION): | |
43 | # ** create default permissions ** # |
|
43 | # ** create default permissions ** # | |
44 | #===================================== |
|
44 | #===================================== | |
45 | for p in models.Permission.PERMS: |
|
45 | for p in models.Permission.PERMS: | |
46 | if not get_by_key(models.Permission, p[0]): |
|
46 | if not get_by_key(models.Permission, p[0]): | |
47 | new_perm = models.Permission() |
|
47 | new_perm = models.Permission() | |
48 | new_perm.permission_name = p[0] |
|
48 | new_perm.permission_name = p[0] | |
49 | new_perm.permission_longname = p[0] #translation err with p[1] |
|
49 | new_perm.permission_longname = p[0] #translation err with p[1] | |
50 | print 'Creating new permission %s' % p[0] |

50 | print('Creating new permission %s' % p[0]) | 
51 | _SESSION().add(new_perm) |
|
51 | _SESSION().add(new_perm) | |
52 |
|
52 | |||
53 | _SESSION().commit() |
|
53 | _SESSION().commit() | |
54 |
|
54 | |||
55 | # ** set default create_on_write to active |
|
55 | # ** set default create_on_write to active | |
56 | user = models.User.query().filter( |
|
56 | user = models.User.query().filter( | |
57 | models.User.username == 'default').scalar() |
|
57 | models.User.username == 'default').scalar() | |
58 |
|
58 | |||
59 | _def = 'hg.create.write_on_repogroup.true' |
|
59 | _def = 'hg.create.write_on_repogroup.true' | |
60 | new = models.UserToPerm() |
|
60 | new = models.UserToPerm() | |
61 | new.user = user |
|
61 | new.user = user | |
62 | new.permission = get_by_key(models.Permission, _def) |
|
62 | new.permission = get_by_key(models.Permission, _def) | |
63 | print 'Setting default to %s' % _def |

63 | print('Setting default to %s' % _def) | 
64 | _SESSION().add(new) |
|
64 | _SESSION().add(new) | |
65 | _SESSION().commit() |
|
65 | _SESSION().commit() |
@@ -1,44 +1,44 b'' | |||||
1 | import logging |
|
1 | import logging | |
2 | import datetime |
|
2 | import datetime | |
3 |
|
3 | |||
4 | from sqlalchemy import * |
|
4 | from sqlalchemy import * | |
5 | from sqlalchemy.exc import DatabaseError |
|
5 | from sqlalchemy.exc import DatabaseError | |
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload |
|
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload | |
7 | from sqlalchemy.orm.session import Session |
|
7 | from sqlalchemy.orm.session import Session | |
8 | from sqlalchemy.ext.declarative import declarative_base |
|
8 | from sqlalchemy.ext.declarative import declarative_base | |
9 |
|
9 | |||
10 | from rhodecode.lib.dbmigrate.migrate import * |
|
10 | from rhodecode.lib.dbmigrate.migrate import * | |
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * |
|
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * | |
12 | from rhodecode.lib.utils2 import str2bool |
|
12 | from rhodecode.lib.utils2 import str2bool | |
13 |
|
13 | |||
14 | from rhodecode.model.meta import Base |
|
14 | from rhodecode.model.meta import Base | |
15 | from rhodecode.model import meta |
|
15 | from rhodecode.model import meta | |
16 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify |
|
16 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify | |
17 |
|
17 | |||
18 | log = logging.getLogger(__name__) |
|
18 | log = logging.getLogger(__name__) | |
19 |
|
19 | |||
20 |
|
20 | |||
21 | def upgrade(migrate_engine): |
|
21 | def upgrade(migrate_engine): | |
22 | """ |
|
22 | """ | |
23 | Upgrade operations go here. |
|
23 | Upgrade operations go here. | |
24 | Don't create your own engine; bind migrate_engine to your metadata |
|
24 | Don't create your own engine; bind migrate_engine to your metadata | |
25 | """ |
|
25 | """ | |
26 | _reset_base(migrate_engine) |
|
26 | _reset_base(migrate_engine) | |
27 | from rhodecode.lib.dbmigrate.schema import db_2_3_0_1 |
|
27 | from rhodecode.lib.dbmigrate.schema import db_2_3_0_1 | |
28 |
|
28 | |||
29 | # issue fixups |
|
29 | # issue fixups | |
30 | fixups(db_2_3_0_1, meta.Session) |
|
30 | fixups(db_2_3_0_1, meta.Session) | |
31 |
|
31 | |||
32 |
|
32 | |||
33 | def downgrade(migrate_engine): |
|
33 | def downgrade(migrate_engine): | |
34 | meta = MetaData() |
|
34 | meta = MetaData() | |
35 | meta.bind = migrate_engine |
|
35 | meta.bind = migrate_engine | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | def fixups(models, _SESSION): |
|
38 | def fixups(models, _SESSION): | |
39 | notify('Setting default renderer to rst') |
|
39 | notify('Setting default renderer to rst') | |
40 | for cs_comment in models.ChangesetComment.get_all(): |
|
40 | for cs_comment in models.ChangesetComment.get_all(): | |
41 | print 'comment_id %s renderer rst' % (cs_comment.comment_id) |

41 | print('comment_id %s renderer rst' % (cs_comment.comment_id)) | 
42 | cs_comment.renderer = 'rst' |
|
42 | cs_comment.renderer = 'rst' | |
43 | _SESSION().add(cs_comment) |
|
43 | _SESSION().add(cs_comment) | |
44 | _SESSION().commit() |
|
44 | _SESSION().commit() |
@@ -1,149 +1,149 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import os |
|
21 | import os | |
22 | import errno |
|
22 | import errno | |
23 |
|
23 | |||
24 | from multiprocessing.util import Finalize |
|
24 | from multiprocessing.util import Finalize | |
25 |
|
25 | |||
26 | from rhodecode.lib.compat import kill |
|
26 | from rhodecode.lib.compat import kill | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | class LockHeld(Exception): |
|
29 | class LockHeld(Exception): | |
30 | pass |
|
30 | pass | |
31 |
|
31 | |||
32 |
|
32 | |||
33 | class DaemonLock(object): |
|
33 | class DaemonLock(object): | |
34 | """daemon locking |
|
34 | """daemon locking | |
35 | USAGE: |
|
35 | USAGE: | |
36 | try: |
|
36 | try: | |
37 | l = DaemonLock(file_='/path/tolockfile',desc='test lock') |
|
37 | l = DaemonLock(file_='/path/tolockfile',desc='test lock') | |
38 | main() |
|
38 | main() | |
39 | l.release() |
|
39 | l.release() | |
40 | except LockHeld: |
|
40 | except LockHeld: | |
41 | sys.exit(1) |
|
41 | sys.exit(1) | |
42 | """ |
|
42 | """ | |
43 |
|
43 | |||
44 | def __init__(self, file_=None, callbackfn=None, |
|
44 | def __init__(self, file_=None, callbackfn=None, | |
45 | desc='daemon lock', debug=False): |
|
45 | desc='daemon lock', debug=False): | |
46 |
|
46 | |||
47 | lock_name = os.path.join(os.path.dirname(__file__), 'running.lock') |
|
47 | lock_name = os.path.join(os.path.dirname(__file__), 'running.lock') | |
48 | self.pidfile = file_ if file_ else lock_name |
|
48 | self.pidfile = file_ if file_ else lock_name | |
49 | self.callbackfn = callbackfn |
|
49 | self.callbackfn = callbackfn | |
50 | self.desc = desc |
|
50 | self.desc = desc | |
51 | self.debug = debug |
|
51 | self.debug = debug | |
52 | self.held = False |
|
52 | self.held = False | |
53 | # run the lock automatically! |

53 | # run the lock automatically! | 
54 | self.lock() |
|
54 | self.lock() | |
55 | self._finalize = Finalize(self, DaemonLock._on_finalize, |
|
55 | self._finalize = Finalize(self, DaemonLock._on_finalize, | |
56 | args=(self, debug), exitpriority=10) |
|
56 | args=(self, debug), exitpriority=10) | |
57 |
|
57 | |||
58 | @staticmethod |
|
58 | @staticmethod | |
59 | def _on_finalize(lock, debug): |
|
59 | def _on_finalize(lock, debug): | |
60 | if lock.held: |
|
60 | if lock.held: | |
61 | if debug: |
|
61 | if debug: | |
62 | print 'leck held finilazing and running lock.release()' |
|
62 | print('lock held; finalizing and running lock.release()') | |
63 | lock.release() |
|
63 | lock.release() | |
64 |
|
64 | |||
65 | def lock(self): |
|
65 | def lock(self): | |
66 | """ |
|
66 | """ | |
67 | locking function; if a lock is already present it |
|
67 | locking function; if a lock is already present it | |
68 | will raise a LockHeld exception |
|
68 | will raise a LockHeld exception | |
69 | """ |
|
69 | """ | |
70 | lockname = '%s' % (os.getpid()) |
|
70 | lockname = '%s' % (os.getpid()) | |
71 | if self.debug: |
|
71 | if self.debug: | |
72 | print 'running lock' |
|
72 | print('running lock') | |
73 | self.trylock() |
|
73 | self.trylock() | |
74 | self.makelock(lockname, self.pidfile) |
|
74 | self.makelock(lockname, self.pidfile) | |
75 | return True |
|
75 | return True | |
76 |
|
76 | |||
77 | def trylock(self): |
|
77 | def trylock(self): | |
78 | running_pid = False |
|
78 | running_pid = False | |
79 | if self.debug: |
|
79 | if self.debug: | |
80 | print 'checking for already running process' |
|
80 | print('checking for already running process') | |
81 | try: |
|
81 | try: | |
82 | with open(self.pidfile, 'r') as f: |
|
82 | with open(self.pidfile, 'r') as f: | |
83 | try: |
|
83 | try: | |
84 | running_pid = int(f.readline()) |
|
84 | running_pid = int(f.readline()) | |
85 | except ValueError: |
|
85 | except ValueError: | |
86 | running_pid = -1 |
|
86 | running_pid = -1 | |
87 |
|
87 | |||
88 | if self.debug: |
|
88 | if self.debug: | |
89 | print 'lock file present running_pid: %s, ' \ |
|
89 | print('lock file present running_pid: %s, ' | |
90 | 'checking for execution' % (running_pid,) |
|
90 | 'checking for execution' % (running_pid,)) | |
91 | # Now we check the PID from lock file matches to the current |
|
91 | # Now we check the PID from lock file matches to the current | |
92 | # process PID |
|
92 | # process PID | |
93 | if running_pid: |
|
93 | if running_pid: | |
94 | try: |
|
94 | try: | |
95 | kill(running_pid, 0) |
|
95 | kill(running_pid, 0) | |
96 | except OSError as exc: |
|
96 | except OSError as exc: | |
97 | if exc.errno in (errno.ESRCH, errno.EPERM): |
|
97 | if exc.errno in (errno.ESRCH, errno.EPERM): | |
98 | print "Lock File is there but" \ |
|
98 | print("Lock File is there but" | |
99 | " the program is not running" |
|
99 | " the program is not running") | |
100 | print "Removing lock file for the: %s" % running_pid |
|
100 | print("Removing lock file for the: %s" % running_pid) | |
101 | self.release() |
|
101 | self.release() | |
102 | else: |
|
102 | else: | |
103 | raise |
|
103 | raise | |
104 | else: |
|
104 | else: | |
105 | print "You already have an instance of the program running" |
|
105 | print("You already have an instance of the program running") | |
106 | print "It is running as process %s" % running_pid |
|
106 | print("It is running as process %s" % running_pid) | |
107 | raise LockHeld() |
|
107 | raise LockHeld() | |
108 |
|
108 | |||
109 | except IOError as e: |
|
109 | except IOError as e: | |
110 | if e.errno != 2: |
|
110 | if e.errno != 2: | |
111 | raise |
|
111 | raise | |
112 |
|
112 | |||
113 | def release(self): |
|
113 | def release(self): | |
114 | """releases the pid by removing the pidfile |
|
114 | """releases the pid by removing the pidfile | |
115 | """ |
|
115 | """ | |
116 | if self.debug: |
|
116 | if self.debug: | |
117 | print 'trying to release the pidlock' |
|
117 | print('trying to release the pidlock') | |
118 |
|
118 | |||
119 | if self.callbackfn: |
|
119 | if self.callbackfn: | |
120 | #execute callback function on release |
|
120 | # execute callback function on release | |
121 | if self.debug: |
|
121 | if self.debug: | |
122 | print 'executing callback function %s' % self.callbackfn |
|
122 | print('executing callback function %s' % self.callbackfn) | |
123 | self.callbackfn() |
|
123 | self.callbackfn() | |
124 | try: |
|
124 | try: | |
125 | if self.debug: |
|
125 | if self.debug: | |
126 | print 'removing pidfile %s' % self.pidfile |
|
126 | print('removing pidfile %s' % self.pidfile) | |
127 | os.remove(self.pidfile) |
|
127 | os.remove(self.pidfile) | |
128 | self.held = False |
|
128 | self.held = False | |
129 | except OSError as e: |
|
129 | except OSError as e: | |
130 | if self.debug: |
|
130 | if self.debug: | |
131 | print 'removing pidfile failed %s' % e |
|
131 | print('removing pidfile failed %s' % e) | |
132 | pass |
|
132 | pass | |
133 |
|
133 | |||
134 | def makelock(self, lockname, pidfile): |
|
134 | def makelock(self, lockname, pidfile): | |
135 | """ |
|
135 | """ | |
136 | this function will make an actual lock |
|
136 | this function will make an actual lock | |
137 |
|
137 | |||
138 | :param lockname: actual pid to write into the file |
|
138 | :param lockname: actual pid to write into the file | |
139 | :param pidfile: the file to write the pid in |
|
139 | :param pidfile: the file to write the pid in | |
140 | """ |
|
140 | """ | |
141 | if self.debug: |
|
141 | if self.debug: | |
142 | print 'creating a file %s and pid: %s' % (pidfile, lockname) |
|
142 | print('creating a file %s and pid: %s' % (pidfile, lockname)) | |
143 |
|
143 | |||
144 | dir_, file_ = os.path.split(pidfile) |
|
144 | dir_, file_ = os.path.split(pidfile) | |
145 | if not os.path.isdir(dir_): |
|
145 | if not os.path.isdir(dir_): | |
146 | os.makedirs(dir_) |
|
146 | os.makedirs(dir_) | |
147 | with open(self.pidfile, 'wb') as f: |
|
147 | with open(self.pidfile, 'wb') as f: | |
148 | f.write(lockname) |
|
148 | f.write(lockname) | |
149 | self.held = True |
|
149 | self.held = True |
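A minimal usage sketch of DaemonLock, following the class docstring above; the import path, lock file location, and callback are illustrative assumptions, not values from this changeset:

    from rhodecode.lib.utils import DaemonLock, LockHeld  # import path assumed

    def on_release():
        print('lock released, running cleanup')  # hypothetical callback

    try:
        # lock() runs inside __init__ and raises LockHeld if already taken
        lock = DaemonLock(file_='/tmp/example.lock', callbackfn=on_release,
                          desc='example lock', debug=True)
        # ... exclusive work ...
        lock.release()
    except LockHeld:
        pass  # another process holds the lock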
@@ -1,1025 +1,1025 b'' | |||||
1 | # (c) 2005 Ian Bicking and contributors; written for Paste |
|
1 | # (c) 2005 Ian Bicking and contributors; written for Paste | |
2 | # (http://pythonpaste.org) Licensed under the MIT license: |
|
2 | # (http://pythonpaste.org) Licensed under the MIT license: | |
3 | # http://www.opensource.org/licenses/mit-license.php |
|
3 | # http://www.opensource.org/licenses/mit-license.php | |
4 | # |
|
4 | # | |
5 | # For discussion of daemonizing: |
|
5 | # For discussion of daemonizing: | |
6 | # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/278731 |
|
6 | # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/278731 | |
7 | # |
|
7 | # | |
8 | # Code taken also from QP: http://www.mems-exchange.org/software/qp/ From |
|
8 | # Code taken also from QP: http://www.mems-exchange.org/software/qp/ From | |
9 | # lib/site.py |
|
9 | # lib/site.py | |
10 |
|
10 | |||
11 | import atexit |
|
11 | import atexit | |
12 | import errno |
|
12 | import errno | |
13 | import fnmatch |
|
13 | import fnmatch | |
14 | import logging |
|
14 | import logging | |
15 | import optparse |
|
15 | import optparse | |
16 | import os |
|
16 | import os | |
17 | import re |
|
17 | import re | |
18 | import subprocess32 |
|
18 | import subprocess32 | |
19 | import sys |
|
19 | import sys | |
20 | import textwrap |
|
20 | import textwrap | |
21 | import threading |
|
21 | import threading | |
22 | import time |
|
22 | import time | |
23 | import traceback |
|
23 | import traceback | |
24 |
|
24 | |||
25 | from logging.config import fileConfig |
|
25 | from logging.config import fileConfig | |
26 | import ConfigParser as configparser |
|
26 | import ConfigParser as configparser | |
27 | from paste.deploy import loadserver |
|
27 | from paste.deploy import loadserver | |
28 | from paste.deploy import loadapp |
|
28 | from paste.deploy import loadapp | |
29 |
|
29 | |||
30 | import rhodecode |
|
30 | import rhodecode | |
31 | from rhodecode.lib.compat import kill |
|
31 | from rhodecode.lib.compat import kill | |
32 |
|
32 | |||
33 |
|
33 | |||
34 | def make_web_build_callback(filename): |
|
34 | def make_web_build_callback(filename): | |
35 | p = subprocess32.Popen('make web-build', shell=True, |
|
35 | p = subprocess32.Popen('make web-build', shell=True, | |
36 | stdout=subprocess32.PIPE, |
|
36 | stdout=subprocess32.PIPE, | |
37 | stderr=subprocess32.PIPE, |
|
37 | stderr=subprocess32.PIPE, | |
38 | cwd=os.path.dirname(os.path.dirname(__file__))) |
|
38 | cwd=os.path.dirname(os.path.dirname(__file__))) | |
39 | stdout, stderr = p.communicate() |
|
39 | stdout, stderr = p.communicate() | |
40 | stdout = ''.join(stdout) |
|
40 | stdout = ''.join(stdout) | |
41 | stderr = ''.join(stderr) |
|
41 | stderr = ''.join(stderr) | |
42 | if stdout: |
|
42 | if stdout: | |
43 | print stdout |
|
43 | print(stdout) | |
44 | if stderr: |
|
44 | if stderr: | |
45 | print '%s %s %s' % ('-' * 20, 'ERRORS', '-' * 20) |
|
45 | print('%s %s %s' % ('-' * 20, 'ERRORS', '-' * 20)) | |
46 | print stderr |
|
46 | print(stderr) | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | MAXFD = 1024 |
|
49 | MAXFD = 1024 | |
50 | HERE = os.path.dirname(os.path.abspath(__file__)) |
|
50 | HERE = os.path.dirname(os.path.abspath(__file__)) | |
51 | SERVER_RUNNING_FILE = None |
|
51 | SERVER_RUNNING_FILE = None | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | # watch those extra files for changes, server gets restarted if file changes |
|
54 | # watch those extra files for changes, server gets restarted if file changes | |
55 | GLOBAL_EXTRA_FILES = { |
|
55 | GLOBAL_EXTRA_FILES = { | |
56 | 'rhodecode/public/css/*.less': make_web_build_callback, |
|
56 | 'rhodecode/public/css/*.less': make_web_build_callback, | |
57 | 'rhodecode/public/js/src/**/*.js': make_web_build_callback, |
|
57 | 'rhodecode/public/js/src/**/*.js': make_web_build_callback, | |
58 | } |
|
58 | } | |
59 |
|
59 | |||
60 |
|
60 | |||
61 |
|
61 | |||
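GLOBAL_EXTRA_FILES above maps glob patterns to callbacks; run_callback_for_pattern() below invokes the matching callback with the changed filename. A hedged sketch of registering one more watcher (the pattern and callback are hypothetical):

    def rebuild_templates_callback(filename):
        # receives the changed path, prefixed with 'rhodecode/' by the walker
        print('template changed: %s' % filename)

    GLOBAL_EXTRA_FILES['rhodecode/templates/**/*.mako'] = rebuild_templates_callback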
62 | ## HOOKS - inspired by gunicorn # |
|
62 | ## HOOKS - inspired by gunicorn # | |
63 |
|
63 | |||
64 | def when_ready(server): |
|
64 | def when_ready(server): | |
65 | """ |
|
65 | """ | |
66 | Called just after the server is started. |
|
66 | Called just after the server is started. | |
67 | """ |
|
67 | """ | |
68 |
|
68 | |||
69 | def _remove_server_running_file(): |
|
69 | def _remove_server_running_file(): | |
70 | if os.path.isfile(SERVER_RUNNING_FILE): |
|
70 | if os.path.isfile(SERVER_RUNNING_FILE): | |
71 | os.remove(SERVER_RUNNING_FILE) |
|
71 | os.remove(SERVER_RUNNING_FILE) | |
72 |
|
72 | |||
73 | if SERVER_RUNNING_FILE: |
|
73 | if SERVER_RUNNING_FILE: | |
74 | with open(SERVER_RUNNING_FILE, 'wb') as f: |
|
74 | with open(SERVER_RUNNING_FILE, 'wb') as f: | |
75 | f.write(str(os.getpid())) |
|
75 | f.write(str(os.getpid())) | |
76 | # register cleanup of that file when server exits |
|
76 | # register cleanup of that file when server exits | |
77 | atexit.register(_remove_server_running_file) |
|
77 | atexit.register(_remove_server_running_file) | |
78 |
|
78 | |||
79 |
|
79 | |||
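when_ready() writes the current PID into SERVER_RUNNING_FILE and leaves removal to atexit; the same pattern in isolation (the path is illustrative):

    import atexit, os

    RUNNING_FILE = '/tmp/server.running'  # stand-in for SERVER_RUNNING_FILE

    with open(RUNNING_FILE, 'wb') as f:
        f.write(str(os.getpid()))
    atexit.register(
        lambda: os.path.isfile(RUNNING_FILE) and os.remove(RUNNING_FILE))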
80 | def setup_logging(config_uri, fileConfig=fileConfig, |
|
80 | def setup_logging(config_uri, fileConfig=fileConfig, | |
81 | configparser=configparser): |
|
81 | configparser=configparser): | |
82 | """ |
|
82 | """ | |
83 | Set up logging via the logging module's fileConfig function with the |
|
83 | Set up logging via the logging module's fileConfig function with the | |
84 | filename specified via ``config_uri`` (a string in the form |
|
84 | filename specified via ``config_uri`` (a string in the form | |
85 | ``filename#sectionname``). |
|
85 | ``filename#sectionname``). | |
86 |
|
86 | |||
87 | ConfigParser defaults are specified for the special ``__file__`` |
|
87 | ConfigParser defaults are specified for the special ``__file__`` | |
88 | and ``here`` variables, similar to PasteDeploy config loading. |
|
88 | and ``here`` variables, similar to PasteDeploy config loading. | |
89 | """ |
|
89 | """ | |
90 | path, _ = _getpathsec(config_uri, None) |
|
90 | path, _ = _getpathsec(config_uri, None) | |
91 | parser = configparser.ConfigParser() |
|
91 | parser = configparser.ConfigParser() | |
92 | parser.read([path]) |
|
92 | parser.read([path]) | |
93 | if parser.has_section('loggers'): |
|
93 | if parser.has_section('loggers'): | |
94 | config_file = os.path.abspath(path) |
|
94 | config_file = os.path.abspath(path) | |
95 | return fileConfig( |
|
95 | return fileConfig( | |
96 | config_file, |
|
96 | config_file, | |
97 | {'__file__': config_file, 'here': os.path.dirname(config_file)} |
|
97 | {'__file__': config_file, 'here': os.path.dirname(config_file)} | |
98 | ) |
|
98 | ) | |
99 |
|
99 | |||
100 |
|
100 | |||
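The expected call pattern for setup_logging(), matching the config_uri form described in its docstring:

    # the optional '#sectionname' suffix is stripped via _getpathsec()
    setup_logging('production.ini#main')
    # with a [loggers] section present this is equivalent to
    # fileConfig('production.ini', {'__file__': ..., 'here': ...})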
101 | def set_rhodecode_is_test(config_uri): |
|
101 | def set_rhodecode_is_test(config_uri): | |
102 | """If is_test is defined in the config file sets rhodecode.is_test.""" |
|
102 | """If is_test is defined in the config file sets rhodecode.is_test.""" | |
103 | path, _ = _getpathsec(config_uri, None) |
|
103 | path, _ = _getpathsec(config_uri, None) | |
104 | parser = configparser.ConfigParser() |
|
104 | parser = configparser.ConfigParser() | |
105 | parser.read(path) |
|
105 | parser.read(path) | |
106 | rhodecode.is_test = ( |
|
106 | rhodecode.is_test = ( | |
107 | parser.has_option('app:main', 'is_test') and |
|
107 | parser.has_option('app:main', 'is_test') and | |
108 | parser.getboolean('app:main', 'is_test')) |
|
108 | parser.getboolean('app:main', 'is_test')) | |
109 |
|
109 | |||
110 |
|
110 | |||
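set_rhodecode_is_test() looks for an optional flag in the [app:main] section; the ini fragment it expects would look like this (sketch):

    [app:main]
    is_test = true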
111 | def _getpathsec(config_uri, name): |
|
111 | def _getpathsec(config_uri, name): | |
112 | if '#' in config_uri: |
|
112 | if '#' in config_uri: | |
113 | path, section = config_uri.split('#', 1) |
|
113 | path, section = config_uri.split('#', 1) | |
114 | else: |
|
114 | else: | |
115 | path, section = config_uri, 'main' |
|
115 | path, section = config_uri, 'main' | |
116 | if name: |
|
116 | if name: | |
117 | section = name |
|
117 | section = name | |
118 | return path, section |
|
118 | return path, section | |
119 |
|
119 | |||
120 |
|
120 | |||
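How _getpathsec() resolves the supported config_uri forms (filenames are illustrative):

    _getpathsec('production.ini', None)          # -> ('production.ini', 'main')
    _getpathsec('production.ini#websrv', None)   # -> ('production.ini', 'websrv')
    _getpathsec('production.ini#websrv', 'app')  # -> ('production.ini', 'app')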
121 | def parse_vars(args): |
|
121 | def parse_vars(args): | |
122 | """ |
|
122 | """ | |
123 | Given variables like ``['a=b', 'c=d']`` turns it into ``{'a': |
|
123 | Given variables like ``['a=b', 'c=d']`` turns it into ``{'a': | |
124 | 'b', 'c': 'd'}`` |
|
124 | 'b', 'c': 'd'}`` | |
125 | """ |
|
125 | """ | |
126 | result = {} |
|
126 | result = {} | |
127 | for arg in args: |
|
127 | for arg in args: | |
128 | if '=' not in arg: |
|
128 | if '=' not in arg: | |
129 | raise ValueError( |
|
129 | raise ValueError( | |
130 | 'Variable assignment %r invalid (no "=")' |
|
130 | 'Variable assignment %r invalid (no "=")' | |
131 | % arg) |
|
131 | % arg) | |
132 | name, value = arg.split('=', 1) |
|
132 | name, value = arg.split('=', 1) | |
133 | result[name] = value |
|
133 | result[name] = value | |
134 | return result |
|
134 | return result | |
135 |
|
135 | |||
136 |
|
136 | |||
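A quick check of parse_vars() as documented above:

    parse_vars(['http_port=8080', 'debug=true'])
    # -> {'http_port': '8080', 'debug': 'true'}
    parse_vars(['oops'])  # raises ValueError: assignment has no "="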
137 | def _match_pattern(filename): |
|
137 | def _match_pattern(filename): | |
138 | for pattern in GLOBAL_EXTRA_FILES: |
|
138 | for pattern in GLOBAL_EXTRA_FILES: | |
139 | if fnmatch.fnmatch(filename, pattern): |
|
139 | if fnmatch.fnmatch(filename, pattern): | |
140 | return pattern |
|
140 | return pattern | |
141 | return False |
|
141 | return False | |
142 |
|
142 | |||
143 |
|
143 | |||
144 | def generate_extra_file_list(): |
|
144 | def generate_extra_file_list(): | |
145 |
|
145 | |||
146 | extra_list = [] |
|
146 | extra_list = [] | |
147 | for root, dirs, files in os.walk(HERE, topdown=True): |
|
147 | for root, dirs, files in os.walk(HERE, topdown=True): | |
148 | for fname in files: |
|
148 | for fname in files: | |
149 | stripped_src = os.path.join( |
|
149 | stripped_src = os.path.join( | |
150 | 'rhodecode', os.path.relpath(os.path.join(root, fname), HERE)) |
|
150 | 'rhodecode', os.path.relpath(os.path.join(root, fname), HERE)) | |
151 |
|
151 | |||
152 | if _match_pattern(stripped_src): |
|
152 | if _match_pattern(stripped_src): | |
153 | extra_list.append(stripped_src) |
|
153 | extra_list.append(stripped_src) | |
154 |
|
154 | |||
155 | return extra_list |
|
155 | return extra_list | |
156 |
|
156 | |||
157 |
|
157 | |||
158 | def run_callback_for_pattern(filename): |
|
158 | def run_callback_for_pattern(filename): | |
159 | pattern = _match_pattern(filename) |
|
159 | pattern = _match_pattern(filename) | |
160 | if pattern: |
|
160 | if pattern: | |
161 | _file_callback = GLOBAL_EXTRA_FILES.get(pattern) |
|
161 | _file_callback = GLOBAL_EXTRA_FILES.get(pattern) | |
162 | if callable(_file_callback): |
|
162 | if callable(_file_callback): | |
163 | _file_callback(filename) |
|
163 | _file_callback(filename) | |
164 |
|
164 | |||
165 |
|
165 | |||
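How the glob matching above resolves a changed file to its callback (the filename is illustrative):

    fname = 'rhodecode/public/css/style.less'  # hypothetical changed file
    _match_pattern(fname)            # -> 'rhodecode/public/css/*.less'
    run_callback_for_pattern(fname)  # calls make_web_build_callback(fname)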
166 | class DaemonizeException(Exception): |
|
166 | class DaemonizeException(Exception): | |
167 | pass |
|
167 | pass | |
168 |
|
168 | |||
169 |
|
169 | |||
170 | class RcServerCommand(object): |
|
170 | class RcServerCommand(object): | |
171 |
|
171 | |||
172 | usage = '%prog config_uri [start|stop|restart|status] [var=value]' |
|
172 | usage = '%prog config_uri [start|stop|restart|status] [var=value]' | |
173 | description = """\ |
|
173 | description = """\ | |
174 | This command serves a web application that uses a PasteDeploy |
|
174 | This command serves a web application that uses a PasteDeploy | |
175 | configuration file for the server and application. |
|
175 | configuration file for the server and application. | |
176 |
|
176 | |||
177 | If start/stop/restart is given, then --daemon is implied, and it will |
|
177 | If start/stop/restart is given, then --daemon is implied, and it will | |
178 | start (normal operation), stop (--stop-daemon), or do both. |
|
178 | start (normal operation), stop (--stop-daemon), or do both. | |
179 |
|
179 | |||
180 | You can also include variable assignments like 'http_port=8080' |
|
180 | You can also include variable assignments like 'http_port=8080' | |
181 | and then use %(http_port)s in your config files. |
|
181 | and then use %(http_port)s in your config files. | |
182 | """ |
|
182 | """ | |
183 | default_verbosity = 1 |
|
183 | default_verbosity = 1 | |
184 |
|
184 | |||
185 | parser = optparse.OptionParser( |
|
185 | parser = optparse.OptionParser( | |
186 | usage, |
|
186 | usage, | |
187 | description=textwrap.dedent(description) |
|
187 | description=textwrap.dedent(description) | |
188 | ) |
|
188 | ) | |
189 | parser.add_option( |
|
189 | parser.add_option( | |
190 | '-n', '--app-name', |
|
190 | '-n', '--app-name', | |
191 | dest='app_name', |
|
191 | dest='app_name', | |
192 | metavar='NAME', |
|
192 | metavar='NAME', | |
193 | help="Load the named application (default main)") |
|
193 | help="Load the named application (default main)") | |
194 | parser.add_option( |
|
194 | parser.add_option( | |
195 | '-s', '--server', |
|
195 | '-s', '--server', | |
196 | dest='server', |
|
196 | dest='server', | |
197 | metavar='SERVER_TYPE', |
|
197 | metavar='SERVER_TYPE', | |
198 | help="Use the named server.") |
|
198 | help="Use the named server.") | |
199 | parser.add_option( |
|
199 | parser.add_option( | |
200 | '--server-name', |
|
200 | '--server-name', | |
201 | dest='server_name', |
|
201 | dest='server_name', | |
202 | metavar='SECTION_NAME', |
|
202 | metavar='SECTION_NAME', | |
203 | help=("Use the named server as defined in the configuration file " |
|
203 | help=("Use the named server as defined in the configuration file " | |
204 | "(default: main)")) |
|
204 | "(default: main)")) | |
205 | parser.add_option( |
|
205 | parser.add_option( | |
206 | '--with-vcsserver', |
|
206 | '--with-vcsserver', | |
207 | dest='vcs_server', |
|
207 | dest='vcs_server', | |
208 | action='store_true', |
|
208 | action='store_true', | |
209 | help=("Start the vcsserver instance together with the RhodeCode server")) |
|
209 | help=("Start the vcsserver instance together with the RhodeCode server")) | |
210 | if hasattr(os, 'fork'): |
|
210 | if hasattr(os, 'fork'): | |
211 | parser.add_option( |
|
211 | parser.add_option( | |
212 | '--daemon', |
|
212 | '--daemon', | |
213 | dest="daemon", |
|
213 | dest="daemon", | |
214 | action="store_true", |
|
214 | action="store_true", | |
215 | help="Run in daemon (background) mode") |
|
215 | help="Run in daemon (background) mode") | |
216 | parser.add_option( |
|
216 | parser.add_option( | |
217 | '--pid-file', |
|
217 | '--pid-file', | |
218 | dest='pid_file', |
|
218 | dest='pid_file', | |
219 | metavar='FILENAME', |
|
219 | metavar='FILENAME', | |
220 | help=("Save PID to file (default to pyramid.pid if running in " |
|
220 | help=("Save PID to file (default to pyramid.pid if running in " | |
221 | "daemon mode)")) |
|
221 | "daemon mode)")) | |
222 | parser.add_option( |
|
222 | parser.add_option( | |
223 | '--running-file', |
|
223 | '--running-file', | |
224 | dest='running_file', |
|
224 | dest='running_file', | |
225 | metavar='RUNNING_FILE', |
|
225 | metavar='RUNNING_FILE', | |
226 | help="Create a running file after the server is initalized with " |
|
226 | help="Create a running file after the server is initalized with " | |
227 | "stored PID of process") |
|
227 | "stored PID of process") | |
228 | parser.add_option( |
|
228 | parser.add_option( | |
229 | '--log-file', |
|
229 | '--log-file', | |
230 | dest='log_file', |
|
230 | dest='log_file', | |
231 | metavar='LOG_FILE', |
|
231 | metavar='LOG_FILE', | |
232 | help="Save output to the given log file (redirects stdout)") |
|
232 | help="Save output to the given log file (redirects stdout)") | |
233 | parser.add_option( |
|
233 | parser.add_option( | |
234 | '--reload', |
|
234 | '--reload', | |
235 | dest='reload', |
|
235 | dest='reload', | |
236 | action='store_true', |
|
236 | action='store_true', | |
237 | help="Use auto-restart file monitor") |
|
237 | help="Use auto-restart file monitor") | |
238 | parser.add_option( |
|
238 | parser.add_option( | |
239 | '--reload-interval', |
|
239 | '--reload-interval', | |
240 | dest='reload_interval', |
|
240 | dest='reload_interval', | |
241 | default=1, |
|
241 | default=1, | |
242 | help=("Seconds between checking files (low number can cause " |
|
242 | help=("Seconds between checking files (low number can cause " | |
243 | "significant CPU usage)")) |
|
243 | "significant CPU usage)")) | |
244 | parser.add_option( |
|
244 | parser.add_option( | |
245 | '--monitor-restart', |
|
245 | '--monitor-restart', | |
246 | dest='monitor_restart', |
|
246 | dest='monitor_restart', | |
247 | action='store_true', |
|
247 | action='store_true', | |
248 | help="Auto-restart server if it dies") |
|
248 | help="Auto-restart server if it dies") | |
249 | parser.add_option( |
|
249 | parser.add_option( | |
250 | '--status', |
|
250 | '--status', | |
251 | action='store_true', |
|
251 | action='store_true', | |
252 | dest='show_status', |
|
252 | dest='show_status', | |
253 | help="Show the status of the (presumably daemonized) server") |
|
253 | help="Show the status of the (presumably daemonized) server") | |
254 | parser.add_option( |
|
254 | parser.add_option( | |
255 | '-v', '--verbose', |
|
255 | '-v', '--verbose', | |
256 | default=default_verbosity, |
|
256 | default=default_verbosity, | |
257 | dest='verbose', |
|
257 | dest='verbose', | |
258 | action='count', |
|
258 | action='count', | |
259 | help="Set verbose level (default "+str(default_verbosity)+")") |
|
259 | help="Set verbose level (default "+str(default_verbosity)+")") | |
260 | parser.add_option( |
|
260 | parser.add_option( | |
261 | '-q', '--quiet', |
|
261 | '-q', '--quiet', | |
262 | action='store_const', |
|
262 | action='store_const', | |
263 | const=0, |
|
263 | const=0, | |
264 | dest='verbose', |
|
264 | dest='verbose', | |
265 | help="Suppress verbose output") |
|
265 | help="Suppress verbose output") | |
266 |
|
266 | |||
267 | if hasattr(os, 'setuid'): |
|
267 | if hasattr(os, 'setuid'): | |
268 | # I don't think these are available on Windows |
|
268 | # I don't think these are available on Windows | |
269 | parser.add_option( |
|
269 | parser.add_option( | |
270 | '--user', |
|
270 | '--user', | |
271 | dest='set_user', |
|
271 | dest='set_user', | |
272 | metavar="USERNAME", |
|
272 | metavar="USERNAME", | |
273 | help="Set the user (usually only possible when run as root)") |
|
273 | help="Set the user (usually only possible when run as root)") | |
274 | parser.add_option( |
|
274 | parser.add_option( | |
275 | '--group', |
|
275 | '--group', | |
276 | dest='set_group', |
|
276 | dest='set_group', | |
277 | metavar="GROUP", |
|
277 | metavar="GROUP", | |
278 | help="Set the group (usually only possible when run as root)") |
|
278 | help="Set the group (usually only possible when run as root)") | |
279 |
|
279 | |||
280 | parser.add_option( |
|
280 | parser.add_option( | |
281 | '--stop-daemon', |
|
281 | '--stop-daemon', | |
282 | dest='stop_daemon', |
|
282 | dest='stop_daemon', | |
283 | action='store_true', |
|
283 | action='store_true', | |
284 | help=('Stop a daemonized server (given a PID file, or default ' |
|
284 | help=('Stop a daemonized server (given a PID file, or default ' | |
285 | 'pyramid.pid file)')) |
|
285 | 'pyramid.pid file)')) | |
286 |
|
286 | |||
287 | _scheme_re = re.compile(r'^[a-z][a-z]+:', re.I) |
|
287 | _scheme_re = re.compile(r'^[a-z][a-z]+:', re.I) | |
288 |
|
288 | |||
289 | _reloader_environ_key = 'PYTHON_RELOADER_SHOULD_RUN' |
|
289 | _reloader_environ_key = 'PYTHON_RELOADER_SHOULD_RUN' | |
290 | _monitor_environ_key = 'PASTE_MONITOR_SHOULD_RUN' |
|
290 | _monitor_environ_key = 'PASTE_MONITOR_SHOULD_RUN' | |
291 |
|
291 | |||
292 | possible_subcommands = ('start', 'stop', 'restart', 'status') |
|
292 | possible_subcommands = ('start', 'stop', 'restart', 'status') | |
293 |
|
293 | |||
294 | def __init__(self, argv, quiet=False): |
|
294 | def __init__(self, argv, quiet=False): | |
295 | self.options, self.args = self.parser.parse_args(argv[1:]) |
|
295 | self.options, self.args = self.parser.parse_args(argv[1:]) | |
296 | if quiet: |
|
296 | if quiet: | |
297 | self.options.verbose = 0 |
|
297 | self.options.verbose = 0 | |
298 |
|
298 | |||
299 | def out(self, msg): # pragma: no cover |
|
299 | def out(self, msg): # pragma: no cover | |
300 | if self.options.verbose > 0: |
|
300 | if self.options.verbose > 0: | |
301 | print(msg) |
|
301 | print(msg) | |
302 |
|
302 | |||
303 | def get_options(self): |
|
303 | def get_options(self): | |
304 | if (len(self.args) > 1 |
|
304 | if (len(self.args) > 1 | |
305 | and self.args[1] in self.possible_subcommands): |
|
305 | and self.args[1] in self.possible_subcommands): | |
306 | restvars = self.args[2:] |
|
306 | restvars = self.args[2:] | |
307 | else: |
|
307 | else: | |
308 | restvars = self.args[1:] |
|
308 | restvars = self.args[1:] | |
309 |
|
309 | |||
310 | return parse_vars(restvars) |
|
310 | return parse_vars(restvars) | |
311 |
|
311 | |||
312 | def run(self): # pragma: no cover |
|
312 | def run(self): # pragma: no cover | |
313 | if self.options.stop_daemon: |
|
313 | if self.options.stop_daemon: | |
314 | return self.stop_daemon() |
|
314 | return self.stop_daemon() | |
315 |
|
315 | |||
316 | if not hasattr(self.options, 'set_user'): |
|
316 | if not hasattr(self.options, 'set_user'): | |
317 | # Windows case: |
|
317 | # Windows case: | |
318 | self.options.set_user = self.options.set_group = None |
|
318 | self.options.set_user = self.options.set_group = None | |
319 |
|
319 | |||
320 | # @@: Is this the right stage to set the user at? |
|
320 | # @@: Is this the right stage to set the user at? | |
321 | self.change_user_group( |
|
321 | self.change_user_group( | |
322 | self.options.set_user, self.options.set_group) |
|
322 | self.options.set_user, self.options.set_group) | |
323 |
|
323 | |||
324 | if not self.args: |
|
324 | if not self.args: | |
325 | self.out('Please provide a configuration file as the first argument, ' |
|
325 | self.out('Please provide a configuration file as the first argument, ' | |
326 | 'most likely it should be production.ini') |
|
326 | 'most likely it should be production.ini') | |
327 | return 2 |
|
327 | return 2 | |
328 | app_spec = self.args[0] |
|
328 | app_spec = self.args[0] | |
329 |
|
329 | |||
330 | if (len(self.args) > 1 |
|
330 | if (len(self.args) > 1 | |
331 | and self.args[1] in self.possible_subcommands): |
|
331 | and self.args[1] in self.possible_subcommands): | |
332 | cmd = self.args[1] |
|
332 | cmd = self.args[1] | |
333 | else: |
|
333 | else: | |
334 | cmd = None |
|
334 | cmd = None | |
335 |
|
335 | |||
336 | if self.options.reload: |
|
336 | if self.options.reload: | |
337 | if os.environ.get(self._reloader_environ_key): |
|
337 | if os.environ.get(self._reloader_environ_key): | |
338 | if self.options.verbose > 1: |
|
338 | if self.options.verbose > 1: | |
339 | self.out('Running reloading file monitor') |
|
339 | self.out('Running reloading file monitor') | |
340 |
|
340 | |||
341 | install_reloader(int(self.options.reload_interval), |
|
341 | install_reloader(int(self.options.reload_interval), | |
342 | [app_spec] + generate_extra_file_list()) |
|
342 | [app_spec] + generate_extra_file_list()) | |
343 | # if self.requires_config_file: |
|
343 | # if self.requires_config_file: | |
344 | # watch_file(self.args[0]) |
|
344 | # watch_file(self.args[0]) | |
345 | else: |
|
345 | else: | |
346 | return self.restart_with_reloader() |
|
346 | return self.restart_with_reloader() | |
347 |
|
347 | |||
348 | if cmd not in (None, 'start', 'stop', 'restart', 'status'): |
|
348 | if cmd not in (None, 'start', 'stop', 'restart', 'status'): | |
349 | self.out( |
|
349 | self.out( | |
350 | 'Error: must give start|stop|restart (not %s)' % cmd) |
|
350 | 'Error: must give start|stop|restart (not %s)' % cmd) | |
351 | return 2 |
|
351 | return 2 | |
352 |
|
352 | |||
353 | if cmd == 'status' or self.options.show_status: |
|
353 | if cmd == 'status' or self.options.show_status: | |
354 | return self.show_status() |
|
354 | return self.show_status() | |
355 |
|
355 | |||
356 | if cmd == 'restart' or cmd == 'stop': |
|
356 | if cmd == 'restart' or cmd == 'stop': | |
357 | result = self.stop_daemon() |
|
357 | result = self.stop_daemon() | |
358 | if result: |
|
358 | if result: | |
359 | if cmd == 'restart': |
|
359 | if cmd == 'restart': | |
360 | self.out("Could not stop daemon; aborting") |
|
360 | self.out("Could not stop daemon; aborting") | |
361 | else: |
|
361 | else: | |
362 | self.out("Could not stop daemon") |
|
362 | self.out("Could not stop daemon") | |
363 | return result |
|
363 | return result | |
364 | if cmd == 'stop': |
|
364 | if cmd == 'stop': | |
365 | return result |
|
365 | return result | |
366 | self.options.daemon = True |
|
366 | self.options.daemon = True | |
367 |
|
367 | |||
368 | if cmd == 'start': |
|
368 | if cmd == 'start': | |
369 | self.options.daemon = True |
|
369 | self.options.daemon = True | |
370 |
|
370 | |||
371 | app_name = self.options.app_name |
|
371 | app_name = self.options.app_name | |
372 |
|
372 | |||
373 | vars = self.get_options() |
|
373 | vars = self.get_options() | |
374 |
|
374 | |||
375 | if self.options.vcs_server: |
|
375 | if self.options.vcs_server: | |
376 | vars['vcs.start_server'] = 'true' |
|
376 | vars['vcs.start_server'] = 'true' | |
377 |
|
377 | |||
378 | if self.options.running_file: |
|
378 | if self.options.running_file: | |
379 | global SERVER_RUNNING_FILE |
|
379 | global SERVER_RUNNING_FILE | |
380 | SERVER_RUNNING_FILE = self.options.running_file |
|
380 | SERVER_RUNNING_FILE = self.options.running_file | |
381 |
|
381 | |||
382 | if not self._scheme_re.search(app_spec): |
|
382 | if not self._scheme_re.search(app_spec): | |
383 | app_spec = 'config:' + app_spec |
|
383 | app_spec = 'config:' + app_spec | |
384 | server_name = self.options.server_name |
|
384 | server_name = self.options.server_name | |
385 | if self.options.server: |
|
385 | if self.options.server: | |
386 | server_spec = 'egg:pyramid' |
|
386 | server_spec = 'egg:pyramid' | |
387 | assert server_name is None |
|
387 | assert server_name is None | |
388 | server_name = self.options.server |
|
388 | server_name = self.options.server | |
389 | else: |
|
389 | else: | |
390 | server_spec = app_spec |
|
390 | server_spec = app_spec | |
391 | base = os.getcwd() |
|
391 | base = os.getcwd() | |
392 |
|
392 | |||
393 | if getattr(self.options, 'daemon', False): |
|
393 | if getattr(self.options, 'daemon', False): | |
394 | if not self.options.pid_file: |
|
394 | if not self.options.pid_file: | |
395 | self.options.pid_file = 'pyramid.pid' |
|
395 | self.options.pid_file = 'pyramid.pid' | |
396 | if not self.options.log_file: |
|
396 | if not self.options.log_file: | |
397 | self.options.log_file = 'pyramid.log' |
|
397 | self.options.log_file = 'pyramid.log' | |
398 |
|
398 | |||
399 | # Ensure the log file is writeable |
|
399 | # Ensure the log file is writeable | |
400 | if self.options.log_file: |
|
400 | if self.options.log_file: | |
401 | try: |
|
401 | try: | |
402 | writeable_log_file = open(self.options.log_file, 'a') |
|
402 | writeable_log_file = open(self.options.log_file, 'a') | |
403 | except IOError as ioe: |
|
403 | except IOError as ioe: | |
404 | msg = 'Error: Unable to write to log file: %s' % ioe |
|
404 | msg = 'Error: Unable to write to log file: %s' % ioe | |
405 | raise ValueError(msg) |
|
405 | raise ValueError(msg) | |
406 | writeable_log_file.close() |
|
406 | writeable_log_file.close() | |
407 |
|
407 | |||
408 | # Ensure the pid file is writeable |
|
408 | # Ensure the pid file is writeable | |
409 | if self.options.pid_file: |
|
409 | if self.options.pid_file: | |
410 | try: |
|
410 | try: | |
411 | writeable_pid_file = open(self.options.pid_file, 'a') |
|
411 | writeable_pid_file = open(self.options.pid_file, 'a') | |
412 | except IOError as ioe: |
|
412 | except IOError as ioe: | |
413 | msg = 'Error: Unable to write to pid file: %s' % ioe |
|
413 | msg = 'Error: Unable to write to pid file: %s' % ioe | |
414 | raise ValueError(msg) |
|
414 | raise ValueError(msg) | |
415 | writeable_pid_file.close() |
|
415 | writeable_pid_file.close() | |
416 |
|
416 | |||
417 |
|
417 | |||
418 | if getattr(self.options, 'daemon', False): |
|
418 | if getattr(self.options, 'daemon', False): | |
419 | try: |
|
419 | try: | |
420 | self.daemonize() |
|
420 | self.daemonize() | |
421 | except DaemonizeException as ex: |
|
421 | except DaemonizeException as ex: | |
422 | if self.options.verbose > 0: |
|
422 | if self.options.verbose > 0: | |
423 | self.out(str(ex)) |
|
423 | self.out(str(ex)) | |
424 | return 2 |
|
424 | return 2 | |
425 |
|
425 | |||
426 | if (self.options.monitor_restart |
|
426 | if (self.options.monitor_restart | |
427 | and not os.environ.get(self._monitor_environ_key)): |
|
427 | and not os.environ.get(self._monitor_environ_key)): | |
428 | return self.restart_with_monitor() |
|
428 | return self.restart_with_monitor() | |
429 |
|
429 | |||
430 | if self.options.pid_file: |
|
430 | if self.options.pid_file: | |
431 | self.record_pid(self.options.pid_file) |
|
431 | self.record_pid(self.options.pid_file) | |
432 |
|
432 | |||
433 | if self.options.log_file: |
|
433 | if self.options.log_file: | |
434 | stdout_log = LazyWriter(self.options.log_file, 'a') |
|
434 | stdout_log = LazyWriter(self.options.log_file, 'a') | |
435 | sys.stdout = stdout_log |
|
435 | sys.stdout = stdout_log | |
436 | sys.stderr = stdout_log |
|
436 | sys.stderr = stdout_log | |
437 | logging.basicConfig(stream=stdout_log) |
|
437 | logging.basicConfig(stream=stdout_log) | |
438 |
|
438 | |||
439 | log_fn = app_spec |
|
439 | log_fn = app_spec | |
440 | if log_fn.startswith('config:'): |
|
440 | if log_fn.startswith('config:'): | |
441 | log_fn = app_spec[len('config:'):] |
|
441 | log_fn = app_spec[len('config:'):] | |
442 | elif log_fn.startswith('egg:'): |
|
442 | elif log_fn.startswith('egg:'): | |
443 | log_fn = None |
|
443 | log_fn = None | |
444 | if log_fn: |
|
444 | if log_fn: | |
445 | log_fn = os.path.join(base, log_fn) |
|
445 | log_fn = os.path.join(base, log_fn) | |
446 | setup_logging(log_fn) |
|
446 | setup_logging(log_fn) | |
447 | set_rhodecode_is_test(log_fn) |
|
447 | set_rhodecode_is_test(log_fn) | |
448 |
|
448 | |||
449 | server = self.loadserver(server_spec, name=server_name, |
|
449 | server = self.loadserver(server_spec, name=server_name, | |
450 | relative_to=base, global_conf=vars) |
|
450 | relative_to=base, global_conf=vars) | |
451 | # starting hooks |
|
451 | # starting hooks | |
452 | app = self.loadapp(app_spec, name=app_name, relative_to=base, |
|
452 | app = self.loadapp(app_spec, name=app_name, relative_to=base, | |
453 | global_conf=vars) |
|
453 | global_conf=vars) | |
454 |
|
454 | |||
455 | if self.options.verbose > 0: |
|
455 | if self.options.verbose > 0: | |
456 | if hasattr(os, 'getpid'): |
|
456 | if hasattr(os, 'getpid'): | |
457 | msg = 'Starting %s in PID %i.' % (__name__, os.getpid()) |
|
457 | msg = 'Starting %s in PID %i.' % (__name__, os.getpid()) | |
458 | else: |
|
458 | else: | |
459 | msg = 'Starting %s.' % (__name__,) |
|
459 | msg = 'Starting %s.' % (__name__,) | |
460 | self.out(msg) |
|
460 | self.out(msg) | |
461 | if SERVER_RUNNING_FILE: |
|
461 | if SERVER_RUNNING_FILE: | |
462 | self.out('PID file written as %s' % (SERVER_RUNNING_FILE, )) |
|
462 | self.out('PID file written as %s' % (SERVER_RUNNING_FILE, )) | |
463 | elif not self.options.pid_file: |
|
463 | elif not self.options.pid_file: | |
464 | self.out('No PID file written by default.') |
|
464 | self.out('No PID file written by default.') | |
465 |
|
465 | |||
466 | try: |
|
466 | try: | |
467 | when_ready(server) |
|
467 | when_ready(server) | |
468 | server(app) |
|
468 | server(app) | |
469 | except (SystemExit, KeyboardInterrupt) as e: |
|
469 | except (SystemExit, KeyboardInterrupt) as e: | |
470 | if self.options.verbose > 1: |
|
470 | if self.options.verbose > 1: | |
471 | raise |
|
471 | raise | |
472 | if str(e): |
|
472 | if str(e): | |
473 | msg = ' ' + str(e) |
|
473 | msg = ' ' + str(e) | |
474 | else: |
|
474 | else: | |
475 | msg = '' |
|
475 | msg = '' | |
476 | self.out('Exiting%s (-v to see traceback)' % msg) |
|
476 | self.out('Exiting%s (-v to see traceback)' % msg) | |
477 |
|
477 | |||
478 |
|
478 | |||
479 | def loadapp(self, app_spec, name, relative_to, **kw): # pragma: no cover |
|
479 | def loadapp(self, app_spec, name, relative_to, **kw): # pragma: no cover | |
480 | return loadapp(app_spec, name=name, relative_to=relative_to, **kw) |
|
480 | return loadapp(app_spec, name=name, relative_to=relative_to, **kw) | |
481 |
|
481 | |||
482 | def loadserver(self, server_spec, name, relative_to, **kw): # pragma:no cover |
|
482 | def loadserver(self, server_spec, name, relative_to, **kw): # pragma:no cover | |
483 | return loadserver( |
|
483 | return loadserver( | |
484 | server_spec, name=name, relative_to=relative_to, **kw) |
|
484 | server_spec, name=name, relative_to=relative_to, **kw) | |
485 |
|
485 | |||
486 | def quote_first_command_arg(self, arg): # pragma: no cover |
|
486 | def quote_first_command_arg(self, arg): # pragma: no cover | |
487 | """ |
|
487 | """ | |
488 | There's a bug in Windows when running an executable that's |
|
488 | There's a bug in Windows when running an executable that's | |
489 | located inside a path with a space in it. This method handles |
|
489 | located inside a path with a space in it. This method handles | |
490 | that case; on non-Windows systems, or for an executable with |
|
490 | that case; on non-Windows systems, or for an executable with | |
491 | no spaces, it simply returns the argument unchanged. |
|
491 | no spaces, it simply returns the argument unchanged. | |
492 | """ |
|
492 | """ | |
493 | if sys.platform != 'win32' or ' ' not in arg: |
|
493 | if sys.platform != 'win32' or ' ' not in arg: | |
494 | # Problem does not apply: |
|
494 | # Problem does not apply: | |
495 | return arg |
|
495 | return arg | |
496 | try: |
|
496 | try: | |
497 | import win32api |
|
497 | import win32api | |
498 | except ImportError: |
|
498 | except ImportError: | |
499 | raise ValueError( |
|
499 | raise ValueError( | |
500 | "The executable %r contains a space, and in order to " |
|
500 | "The executable %r contains a space, and in order to " | |
501 | "handle this issue you must have the win32api module " |
|
501 | "handle this issue you must have the win32api module " | |
502 | "installed" % arg) |
|
502 | "installed" % arg) | |
503 | arg = win32api.GetShortPathName(arg) |
|
503 | arg = win32api.GetShortPathName(arg) | |
504 | return arg |
|
504 | return arg | |
505 |
|
505 | |||
506 | def daemonize(self): # pragma: no cover |
|
506 | def daemonize(self): # pragma: no cover | |
507 | pid = live_pidfile(self.options.pid_file) |
|
507 | pid = live_pidfile(self.options.pid_file) | |
508 | if pid: |
|
508 | if pid: | |
509 | raise DaemonizeException( |
|
509 | raise DaemonizeException( | |
510 | "Daemon is already running (PID: %s from PID file %s)" |
|
510 | "Daemon is already running (PID: %s from PID file %s)" | |
511 | % (pid, self.options.pid_file)) |
|
511 | % (pid, self.options.pid_file)) | |
512 |
|
512 | |||
513 | if self.options.verbose > 0: |
|
513 | if self.options.verbose > 0: | |
514 | self.out('Entering daemon mode') |
|
514 | self.out('Entering daemon mode') | |
515 | pid = os.fork() |
|
515 | pid = os.fork() | |
516 | if pid: |
|
516 | if pid: | |
517 | # The forked process also has a handle on resources, so we |
|
517 | # The forked process also has a handle on resources, so we | |
518 | # *don't* want proper termination of the process, we just |
|
518 | # *don't* want proper termination of the process, we just | |
519 | # want to exit quick (which os._exit() does) |
|
519 | # want to exit quick (which os._exit() does) | |
520 | os._exit(0) |
|
520 | os._exit(0) | |
521 | # Make this the session leader |
|
521 | # Make this the session leader | |
522 | os.setsid() |
|
522 | os.setsid() | |
523 | # Fork again for good measure! |
|
523 | # Fork again for good measure! | |
524 | pid = os.fork() |
|
524 | pid = os.fork() | |
525 | if pid: |
|
525 | if pid: | |
526 | os._exit(0) |
|
526 | os._exit(0) | |
527 |
|
527 | |||
528 | # @@: Should we set the umask and cwd now? |
|
528 | # @@: Should we set the umask and cwd now? | |
529 |
|
529 | |||
530 | import resource # Resource usage information. |
|
530 | import resource # Resource usage information. | |
531 | maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1] |
|
531 | maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1] | |
532 | if maxfd == resource.RLIM_INFINITY: |
|
532 | if maxfd == resource.RLIM_INFINITY: | |
533 | maxfd = MAXFD |
|
533 | maxfd = MAXFD | |
534 | # Iterate through and close all file descriptors. |
|
534 | # Iterate through and close all file descriptors. | |
535 | for fd in range(0, maxfd): |
|
535 | for fd in range(0, maxfd): | |
536 | try: |
|
536 | try: | |
537 | os.close(fd) |
|
537 | os.close(fd) | |
538 | except OSError: # ERROR, fd wasn't open to begin with (ignored) |
|
538 | except OSError: # ERROR, fd wasn't open to begin with (ignored) | |
539 | pass |
|
539 | pass | |
540 |
|
540 | |||
541 | if hasattr(os, "devnull"): |
|
541 | if hasattr(os, "devnull"): | |
542 | REDIRECT_TO = os.devnull |
|
542 | REDIRECT_TO = os.devnull | |
543 | else: |
|
543 | else: | |
544 | REDIRECT_TO = "/dev/null" |
|
544 | REDIRECT_TO = "/dev/null" | |
545 | os.open(REDIRECT_TO, os.O_RDWR) # standard input (0) |
|
545 | os.open(REDIRECT_TO, os.O_RDWR) # standard input (0) | |
546 | # Duplicate standard input to standard output and standard error. |
|
546 | # Duplicate standard input to standard output and standard error. | |
547 | os.dup2(0, 1) # standard output (1) |
|
547 | os.dup2(0, 1) # standard output (1) | |
548 | os.dup2(0, 2) # standard error (2) |
|
548 | os.dup2(0, 2) # standard error (2) | |
549 |
|
549 | |||
550 | def _remove_pid_file(self, written_pid, filename, verbosity): |
|
550 | def _remove_pid_file(self, written_pid, filename, verbosity): | |
551 | current_pid = os.getpid() |
|
551 | current_pid = os.getpid() | |
552 | if written_pid != current_pid: |
|
552 | if written_pid != current_pid: | |
553 | # A forked process must be exiting, not the process that |
|
553 | # A forked process must be exiting, not the process that | |
554 | # wrote the PID file |
|
554 | # wrote the PID file | |
555 | return |
|
555 | return | |
556 | if not os.path.exists(filename): |
|
556 | if not os.path.exists(filename): | |
557 | return |
|
557 | return | |
558 | with open(filename) as f: |
|
558 | with open(filename) as f: | |
559 | content = f.read().strip() |
|
559 | content = f.read().strip() | |
560 | try: |
|
560 | try: | |
561 | pid_in_file = int(content) |
|
561 | pid_in_file = int(content) | |
562 | except ValueError: |
|
562 | except ValueError: | |
563 | pass |
|
563 | pass | |
564 | else: |
|
564 | else: | |
565 | if pid_in_file != current_pid: |
|
565 | if pid_in_file != current_pid: | |
566 | msg = "PID file %s contains %s, not expected PID %s" |
|
566 | msg = "PID file %s contains %s, not expected PID %s" | |
567 | self.out(msg % (filename, pid_in_file, current_pid)) |
|
567 | self.out(msg % (filename, pid_in_file, current_pid)) | |
568 | return |
|
568 | return | |
569 | if verbosity > 0: |
|
569 | if verbosity > 0: | |
570 | self.out("Removing PID file %s" % filename) |
|
570 | self.out("Removing PID file %s" % filename) | |
571 | try: |
|
571 | try: | |
572 | os.unlink(filename) |
|
572 | os.unlink(filename) | |
573 | return |
|
573 | return | |
574 | except OSError as e: |
|
574 | except OSError as e: | |
575 | # Record, but don't give traceback |
|
575 | # Record, but don't give traceback | |
576 | self.out("Cannot remove PID file: (%s)" % e) |
|
576 | self.out("Cannot remove PID file: (%s)" % e) | |
577 | # well, at least let's not leave the invalid PID around... |
|
577 | # well, at least let's not leave the invalid PID around... | |
578 | try: |
|
578 | try: | |
579 | with open(filename, 'w') as f: |
|
579 | with open(filename, 'w') as f: | |
580 | f.write('') |
|
580 | f.write('') | |
581 | except OSError as e: |
|
581 | except OSError as e: | |
582 | self.out('Stale PID left in file: %s (%s)' % (filename, e)) |
|
582 | self.out('Stale PID left in file: %s (%s)' % (filename, e)) | |
583 | else: |
|
583 | else: | |
584 | self.out('Stale PID removed') |
|
584 | self.out('Stale PID removed') | |
585 |
|
585 | |||
586 | def record_pid(self, pid_file): |
|
586 | def record_pid(self, pid_file): | |
587 | pid = os.getpid() |
|
587 | pid = os.getpid() | |
588 | if self.options.verbose > 1: |
|
588 | if self.options.verbose > 1: | |
589 | self.out('Writing PID %s to %s' % (pid, pid_file)) |
|
589 | self.out('Writing PID %s to %s' % (pid, pid_file)) | |
590 | with open(pid_file, 'w') as f: |
|
590 | with open(pid_file, 'w') as f: | |
591 | f.write(str(pid)) |
|
591 | f.write(str(pid)) | |
592 | atexit.register(self._remove_pid_file, pid, pid_file, self.options.verbose) |
|
592 | atexit.register(self._remove_pid_file, pid, pid_file, self.options.verbose) | |
593 |
|
593 | |||
594 | def stop_daemon(self): # pragma: no cover |
|
594 | def stop_daemon(self): # pragma: no cover | |
595 | pid_file = self.options.pid_file or 'pyramid.pid' |
|
595 | pid_file = self.options.pid_file or 'pyramid.pid' | |
596 | if not os.path.exists(pid_file): |
|
596 | if not os.path.exists(pid_file): | |
597 | self.out('No PID file exists in %s' % pid_file) |
|
597 | self.out('No PID file exists in %s' % pid_file) | |
598 | return 1 |
|
598 | return 1 | |
599 | pid = read_pidfile(pid_file) |
|
599 | pid = read_pidfile(pid_file) | |
600 | if not pid: |
|
600 | if not pid: | |
601 | self.out("Not a valid PID file in %s" % pid_file) |
|
601 | self.out("Not a valid PID file in %s" % pid_file) | |
602 | return 1 |
|
602 | return 1 | |
603 | pid = live_pidfile(pid_file) |
|
603 | pid = live_pidfile(pid_file) | |
604 | if not pid: |
|
604 | if not pid: | |
605 | self.out("PID in %s is not valid (deleting)" % pid_file) |
|
605 | self.out("PID in %s is not valid (deleting)" % pid_file) | |
606 | try: |
|
606 | try: | |
607 | os.unlink(pid_file) |
|
607 | os.unlink(pid_file) | |
608 | except (OSError, IOError) as e: |
|
608 | except (OSError, IOError) as e: | |
609 | self.out("Could not delete: %s" % e) |
|
609 | self.out("Could not delete: %s" % e) | |
610 | return 2 |
|
610 | return 2 | |
611 | return 1 |
|
611 | return 1 | |
612 | for j in range(10): |
|
612 | for j in range(10): | |
613 | if not live_pidfile(pid_file): |
|
613 | if not live_pidfile(pid_file): | |
614 | break |
|
614 | break | |
615 | import signal |
|
615 | import signal | |
616 | kill(pid, signal.SIGTERM) |
|
616 | kill(pid, signal.SIGTERM) | |
617 | time.sleep(1) |
|
617 | time.sleep(1) | |
618 | else: |
|
618 | else: | |
619 | self.out("failed to kill web process %s" % pid) |
|
619 | self.out("failed to kill web process %s" % pid) | |
620 | return 3 |
|
620 | return 3 | |
621 | if os.path.exists(pid_file): |
|
621 | if os.path.exists(pid_file): | |
622 | os.unlink(pid_file) |
|
622 | os.unlink(pid_file) | |
623 | return 0 |
|
623 | return 0 | |
624 |
|
624 | |||
625 | def show_status(self): # pragma: no cover |
|
625 | def show_status(self): # pragma: no cover | |
626 | pid_file = self.options.pid_file or 'pyramid.pid' |
|
626 | pid_file = self.options.pid_file or 'pyramid.pid' | |
627 | if not os.path.exists(pid_file): |
|
627 | if not os.path.exists(pid_file): | |
628 | self.out('No PID file %s' % pid_file) |
|
628 | self.out('No PID file %s' % pid_file) | |
629 | return 1 |
|
629 | return 1 | |
630 | pid = read_pidfile(pid_file) |
|
630 | pid = read_pidfile(pid_file) | |
631 | if not pid: |
|
631 | if not pid: | |
632 | self.out('No PID in file %s' % pid_file) |
|
632 | self.out('No PID in file %s' % pid_file) | |
633 | return 1 |
|
633 | return 1 | |
634 | pid = live_pidfile(pid_file) |
|
634 | pid = live_pidfile(pid_file) | |
635 | if not pid: |
|
635 | if not pid: | |
636 | self.out('PID %s in %s is not running' % (pid, pid_file)) |
|
636 | self.out('PID %s in %s is not running' % (pid, pid_file)) | |
637 | return 1 |
|
637 | return 1 | |
638 | self.out('Server running in PID %s' % pid) |
|
638 | self.out('Server running in PID %s' % pid) | |
639 | return 0 |
|
639 | return 0 | |
640 |
|
640 | |||
641 | def restart_with_reloader(self): # pragma: no cover |
|
641 | def restart_with_reloader(self): # pragma: no cover | |
642 | self.restart_with_monitor(reloader=True) |
|
642 | self.restart_with_monitor(reloader=True) | |
643 |
|
643 | |||
644 | def restart_with_monitor(self, reloader=False): # pragma: no cover |
|
644 | def restart_with_monitor(self, reloader=False): # pragma: no cover | |
645 | if self.options.verbose > 0: |
|
645 | if self.options.verbose > 0: | |
646 | if reloader: |
|
646 | if reloader: | |
647 | self.out('Starting subprocess with file monitor') |
|
647 | self.out('Starting subprocess with file monitor') | |
648 | else: |
|
648 | else: | |
649 | self.out('Starting subprocess with monitor parent') |
|
649 | self.out('Starting subprocess with monitor parent') | |
650 | while 1: |
|
650 | while 1: | |
651 | args = [self.quote_first_command_arg(sys.executable)] + sys.argv |
|
651 | args = [self.quote_first_command_arg(sys.executable)] + sys.argv | |
652 | new_environ = os.environ.copy() |
|
652 | new_environ = os.environ.copy() | |
653 | if reloader: |
|
653 | if reloader: | |
654 | new_environ[self._reloader_environ_key] = 'true' |
|
654 | new_environ[self._reloader_environ_key] = 'true' | |
655 | else: |
|
655 | else: | |
656 | new_environ[self._monitor_environ_key] = 'true' |
|
656 | new_environ[self._monitor_environ_key] = 'true' | |
657 | proc = None |
|
657 | proc = None | |
658 | try: |
|
658 | try: | |
659 | try: |
|
659 | try: | |
660 | _turn_sigterm_into_systemexit() |
|
660 | _turn_sigterm_into_systemexit() | |
661 | proc = subprocess32.Popen(args, env=new_environ) |
|
661 | proc = subprocess32.Popen(args, env=new_environ) | |
662 | exit_code = proc.wait() |
|
662 | exit_code = proc.wait() | |
663 | proc = None |
|
663 | proc = None | |
664 | except KeyboardInterrupt: |
|
664 | except KeyboardInterrupt: | |
665 | self.out('^C caught in monitor process') |
|
665 | self.out('^C caught in monitor process') | |
666 | if self.options.verbose > 1: |
|
666 | if self.options.verbose > 1: | |
667 | raise |
|
667 | raise | |
668 | return 1 |
|
668 | return 1 | |
669 | finally: |
|
669 | finally: | |
670 | if proc is not None: |
|
670 | if proc is not None: | |
671 | import signal |
|
671 | import signal | |
672 | try: |
|
672 | try: | |
673 | kill(proc.pid, signal.SIGTERM) |
|
673 | kill(proc.pid, signal.SIGTERM) | |
674 | except (OSError, IOError): |
|
674 | except (OSError, IOError): | |
675 | pass |
|
675 | pass | |
676 |
|
676 | |||
677 | if reloader: |
|
677 | if reloader: | |
678 | # Reloader always exits with code 3; but if we are |
|
678 | # Reloader always exits with code 3; but if we are | |
679 | # a monitor, any exit code will restart |
|
679 | # a monitor, any exit code will restart | |
680 | if exit_code != 3: |
|
680 | if exit_code != 3: | |
681 | return exit_code |
|
681 | return exit_code | |
682 | if self.options.verbose > 0: |
|
682 | if self.options.verbose > 0: | |
683 | self.out('%s %s %s' % ('-' * 20, 'Restarting', '-' * 20)) |
|
683 | self.out('%s %s %s' % ('-' * 20, 'Restarting', '-' * 20)) | |
684 |
|
684 | |||
685 | def change_user_group(self, user, group): # pragma: no cover |
|
685 | def change_user_group(self, user, group): # pragma: no cover | |
686 | if not user and not group: |
|
686 | if not user and not group: | |
687 | return |
|
687 | return | |
688 | import pwd |
|
688 | import pwd | |
689 | import grp |
|
689 | import grp | |
690 | uid = gid = None |
|
690 | uid = gid = None | |
691 | if group: |
|
691 | if group: | |
692 | try: |
|
692 | try: | |
693 | gid = int(group) |
|
693 | gid = int(group) | |
694 | group = grp.getgrgid(gid).gr_name |
|
694 | group = grp.getgrgid(gid).gr_name | |
695 | except ValueError: |
|
695 | except ValueError: | |
696 | try: |
|
696 | try: | |
697 | entry = grp.getgrnam(group) |
|
697 | entry = grp.getgrnam(group) | |
698 | except KeyError: |
|
698 | except KeyError: | |
699 | raise ValueError( |
|
699 | raise ValueError( | |
700 | "Bad group: %r; no such group exists" % group) |
|
700 | "Bad group: %r; no such group exists" % group) | |
701 | gid = entry.gr_gid |
|
701 | gid = entry.gr_gid | |
702 | try: |
|
702 | try: | |
703 | uid = int(user) |
|
703 | uid = int(user) | |
704 | user = pwd.getpwuid(uid).pw_name |
|
704 | user = pwd.getpwuid(uid).pw_name | |
705 | except ValueError: |
|
705 | except ValueError: | |
706 | try: |
|
706 | try: | |
707 | entry = pwd.getpwnam(user) |
|
707 | entry = pwd.getpwnam(user) | |
708 | except KeyError: |
|
708 | except KeyError: | |
709 | raise ValueError( |
|
709 | raise ValueError( | |
710 | "Bad username: %r; no such user exists" % user) |
|
710 | "Bad username: %r; no such user exists" % user) | |
711 | if not gid: |
|
711 | if not gid: | |
712 | gid = entry.pw_gid |
|
712 | gid = entry.pw_gid | |
713 | uid = entry.pw_uid |
|
713 | uid = entry.pw_uid | |
714 | if self.options.verbose > 0: |
|
714 | if self.options.verbose > 0: | |
715 | self.out('Changing user to %s:%s (%s:%s)' % ( |
|
715 | self.out('Changing user to %s:%s (%s:%s)' % ( | |
716 | user, group or '(unknown)', uid, gid)) |
|
716 | user, group or '(unknown)', uid, gid)) | |
717 | if gid: |
|
717 | if gid: | |
718 | os.setgid(gid) |
|
718 | os.setgid(gid) | |
719 | if uid: |
|
719 | if uid: | |
720 | os.setuid(uid) |
|
720 | os.setuid(uid) | |
721 |
|
721 | |||
722 |
|
722 | |||
723 | class LazyWriter(object): |
|
723 | class LazyWriter(object): | |
724 |
|
724 | |||
725 | """ |
|
725 | """ | |
726 | File-like object that opens a file lazily when it is first written |
|
726 | File-like object that opens a file lazily when it is first written | |
727 | to. |
|
727 | to. | |
728 | """ |
|
728 | """ | |
729 |
|
729 | |||
730 | def __init__(self, filename, mode='w'): |
|
730 | def __init__(self, filename, mode='w'): | |
731 | self.filename = filename |
|
731 | self.filename = filename | |
732 | self.fileobj = None |
|
732 | self.fileobj = None | |
733 | self.lock = threading.Lock() |
|
733 | self.lock = threading.Lock() | |
734 | self.mode = mode |
|
734 | self.mode = mode | |
735 |
|
735 | |||
736 | def open(self): |
|
736 | def open(self): | |
737 | if self.fileobj is None: |
|
737 | if self.fileobj is None: | |
738 | with self.lock: |
|
738 | with self.lock: | |
739 | self.fileobj = open(self.filename, self.mode) |
|
739 | self.fileobj = open(self.filename, self.mode) | |
740 | return self.fileobj |
|
740 | return self.fileobj | |
741 |
|
741 | |||
742 | def close(self): |
|
742 | def close(self): | |
743 | fileobj = self.fileobj |
|
743 | fileobj = self.fileobj | |
744 | if fileobj is not None: |
|
744 | if fileobj is not None: | |
745 | fileobj.close() |
|
745 | fileobj.close() | |
746 |
|
746 | |||
747 | def __del__(self): |
|
747 | def __del__(self): | |
748 | self.close() |
|
748 | self.close() | |
749 |
|
749 | |||
750 | def write(self, text): |
|
750 | def write(self, text): | |
751 | fileobj = self.open() |
|
751 | fileobj = self.open() | |
752 | fileobj.write(text) |
|
752 | fileobj.write(text) | |
753 | fileobj.flush() |
|
753 | fileobj.flush() | |
754 |
|
754 | |||
755 | def writelines(self, text): |
|
755 | def writelines(self, text): | |
756 | fileobj = self.open() |
|
756 | fileobj = self.open() | |
757 | fileobj.writelines(text) |
|
757 | fileobj.writelines(text) | |
758 | fileobj.flush() |
|
758 | fileobj.flush() | |
759 |
|
759 | |||
760 | def flush(self): |
|
760 | def flush(self): | |
761 | self.open().flush() |
|
761 | self.open().flush() | |
762 |
|
762 | |||
763 |
|
763 | |||
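``LazyWriter`` behaves like a log file that only appears once something is written. One caveat worth noting: ``open()`` tests ``self.fileobj`` outside the lock, so two threads racing on the very first write can both enter the locked block, and the second ``open()`` replaces (and leaks) the first handle; a stricter variant would re-check inside the lock. A usage sketch with a hypothetical path::

    log = LazyWriter('/tmp/server-errors.log', mode='a')
    # No file exists yet; it is created on the first write.
    log.write('first error\n')             # opens, writes, flushes
    log.writelines(['second\n', 'third\n'])
    log.close()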
764 | def live_pidfile(pidfile): # pragma: no cover |
|
764 | def live_pidfile(pidfile): # pragma: no cover | |
765 | """ |
|
765 | """ | |
766 | (pidfile:str) -> int | None |
|
766 | (pidfile:str) -> int | None | |
767 | Returns an int found in the named file, if there is one, |
|
767 | Returns an int found in the named file, if there is one, | |
768 | and if there is a running process with that process id. |
|
768 | and if there is a running process with that process id. | |
769 | Returns None if no such process exists. |
|
769 | Returns None if no such process exists. | |
770 | """ |
|
770 | """ | |
771 | pid = read_pidfile(pidfile) |
|
771 | pid = read_pidfile(pidfile) | |
772 | if pid: |
|
772 | if pid: | |
773 | try: |
|
773 | try: | |
774 | kill(int(pid), 0) |
|
774 | kill(int(pid), 0) | |
775 | return pid |
|
775 | return pid | |
776 | except OSError as e: |
|
776 | except OSError as e: | |
777 | if e.errno == errno.EPERM: |
|
777 | if e.errno == errno.EPERM: | |
778 | return pid |
|
778 | return pid | |
779 | return None |
|
779 | return None | |
780 |
|
780 | |||
781 |
|
781 | |||
782 | def read_pidfile(filename): |
|
782 | def read_pidfile(filename): | |
783 | if os.path.exists(filename): |
|
783 | if os.path.exists(filename): | |
784 | try: |
|
784 | try: | |
785 | with open(filename) as f: |
|
785 | with open(filename) as f: | |
786 | content = f.read() |
|
786 | content = f.read() | |
787 | return int(content.strip()) |
|
787 | return int(content.strip()) | |
788 | except (ValueError, IOError): |
|
788 | except (ValueError, IOError): | |
789 | return None |
|
789 | return None | |
790 | else: |
|
790 | else: | |
791 | return None |
|
791 | return None | |
792 |
|
792 | |||
793 |
|
793 | |||
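``kill(pid, 0)`` sends no signal at all; it only asks the kernel whether the pid exists. A dead process raises ``OSError`` with ``ESRCH``, while a live process owned by another user raises ``EPERM``, which is why ``EPERM`` is treated as "still alive" above. A usage sketch with a hypothetical pidfile location::

    import os

    PID_FILE = '/tmp/rcserver.pid'  # hypothetical path

    if live_pidfile(PID_FILE):
        raise SystemExit('Daemon already running, pid %s' % read_pidfile(PID_FILE))

    # Otherwise claim the pidfile for this process.
    with open(PID_FILE, 'w') as f:
        f.write(str(os.getpid()))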
794 | def ensure_port_cleanup( |
|
794 | def ensure_port_cleanup( | |
795 | bound_addresses, maxtries=30, sleeptime=2): # pragma: no cover |
|
795 | bound_addresses, maxtries=30, sleeptime=2): # pragma: no cover | |
796 | """ |
|
796 | """ | |
797 | This makes sure any open ports are closed. |
|
797 | This makes sure any open ports are closed. | |
798 |
|
798 | |||
799 | It does this by connecting to each address until the connection is |
|
799 | It does this by connecting to each address until the connection is | |
800 | refused. Servers should call it like:: |
|
800 | refused. Servers should call it like:: | |
801 |
|
801 | |||
802 | ensure_port_cleanup([('127.0.0.1', 80), ('127.0.0.1', 443)]) |
|
802 | ensure_port_cleanup([('127.0.0.1', 80), ('127.0.0.1', 443)]) | |
803 | """ |
|
803 | """ | |
804 | atexit.register(_cleanup_ports, bound_addresses, maxtries=maxtries, |
|
804 | atexit.register(_cleanup_ports, bound_addresses, maxtries=maxtries, | |
805 | sleeptime=sleeptime) |
|
805 | sleeptime=sleeptime) | |
806 |
|
806 | |||
807 |
|
807 | |||
808 | def _cleanup_ports( |
|
808 | def _cleanup_ports( | |
809 | bound_addresses, maxtries=30, sleeptime=2): # pragma: no cover |
|
809 | bound_addresses, maxtries=30, sleeptime=2): # pragma: no cover | |
810 | # Wait for the server to bind to the port. |
|
810 | # Wait for the server to bind to the port. | |
811 | import socket |
|
811 | import socket | |
812 | import errno |
|
812 | import errno | |
813 | for bound_address in bound_addresses: |
|
813 | for bound_address in bound_addresses: | |
814 | for attempt in range(maxtries): |
|
814 | for attempt in range(maxtries): | |
815 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
|
815 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) | |
816 | try: |
|
816 | try: | |
817 | sock.connect(bound_address) |
|
817 | sock.connect(bound_address) | |
818 | except socket.error as e: |
|
818 | except socket.error as e: | |
819 | if e.args[0] != errno.ECONNREFUSED: |
|
819 | if e.args[0] != errno.ECONNREFUSED: | |
820 | raise |
|
820 | raise | |
821 | break |
|
821 | break | |
822 | else: |
|
822 | else: | |
823 | time.sleep(sleeptime) |
|
823 | time.sleep(sleeptime) | |
824 | else: |
|
824 | else: | |
825 | raise SystemExit('Timeout waiting for port.') |
|
825 | raise SystemExit('Timeout waiting for port.') | |
826 | sock.close() |
|
826 | sock.close() | |
827 |
|
827 | |||
828 |
|
828 | |||
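Since ``_cleanup_ports`` passes each entry straight to ``sock.connect()``, the bound addresses must be ``(host, port)`` tuples rather than bare port numbers. A hedged usage sketch::

    # Registered once at startup; at interpreter exit the atexit hook
    # keeps connecting until both sockets report ECONNREFUSED.
    ensure_port_cleanup([('127.0.0.1', 5000), ('127.0.0.1', 5001)])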
829 | def _turn_sigterm_into_systemexit(): # pragma: no cover |
|
829 | def _turn_sigterm_into_systemexit(): # pragma: no cover | |
830 | """ |
|
830 | """ | |
831 | Attempts to turn a SIGTERM signal into a SystemExit exception. |
|
831 | Attempts to turn a SIGTERM signal into a SystemExit exception. | |
832 | """ |
|
832 | """ | |
833 | try: |
|
833 | try: | |
834 | import signal |
|
834 | import signal | |
835 | except ImportError: |
|
835 | except ImportError: | |
836 | return |
|
836 | return | |
837 | def handle_term(signo, frame): |
|
837 | def handle_term(signo, frame): | |
838 | raise SystemExit |
|
838 | raise SystemExit | |
839 | signal.signal(signal.SIGTERM, handle_term) |
|
839 | signal.signal(signal.SIGTERM, handle_term) | |
840 |
|
840 | |||
841 |
|
841 | |||
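With the handler installed, a polite ``kill <pid>`` unwinds the main thread like a normal ``sys.exit()`` call, so ``finally:`` blocks and context managers get a chance to run instead of the process dying mid-statement. A sketch (``serve_forever`` and ``remove_pidfile`` are hypothetical)::

    _turn_sigterm_into_systemexit()
    try:
        serve_forever()        # blocking main loop
    finally:
        # Runs on SIGTERM too, because the handler raises SystemExit.
        remove_pidfile()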
842 | def install_reloader(poll_interval=1, extra_files=None): # pragma: no cover |
|
842 | def install_reloader(poll_interval=1, extra_files=None): # pragma: no cover | |
843 | """ |
|
843 | """ | |
844 | Install the reloading monitor. |
|
844 | Install the reloading monitor. | |
845 |
|
845 | |||
846 | On some platforms server threads may not terminate when the main |
|
846 | On some platforms server threads may not terminate when the main | |
847 | thread does, causing ports to remain open/locked. The |
|
847 | thread does, causing ports to remain open/locked. The | |
848 | ``raise_keyboard_interrupt`` option creates an un-ignorable signal |
|
848 | ``raise_keyboard_interrupt`` option creates an un-ignorable signal | |
849 | which causes the whole application to shut down (rudely). |
|
849 | which causes the whole application to shut down (rudely). | |
850 | """ |
|
850 | """ | |
851 | mon = Monitor(poll_interval=poll_interval) |
|
851 | mon = Monitor(poll_interval=poll_interval) | |
852 | if extra_files is None: |
|
852 | if extra_files is None: | |
853 | extra_files = [] |
|
853 | extra_files = [] | |
854 | mon.extra_files.extend(extra_files) |
|
854 | mon.extra_files.extend(extra_files) | |
855 | t = threading.Thread(target=mon.periodic_reload) |
|
855 | t = threading.Thread(target=mon.periodic_reload) | |
856 | t.setDaemon(True) |
|
856 | t.setDaemon(True) | |
857 | t.start() |
|
857 | t.start() | |
858 |
|
858 | |||
859 |
|
859 | |||
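Note that the monitor thread never restarts the process itself; it exits with status 3 (see ``Monitor._exit`` below) and relies on an outer wrapper to start a fresh interpreter. The shell and ``.bat`` loops quoted in the ``Monitor`` docstring below do exactly that; the same supervisor can be sketched in Python (the ``server.py`` name is taken from that docstring)::

    import subprocess
    import sys

    # Relaunch the server for as long as it asks for a reload (exit code 3).
    while True:
        code = subprocess.call([sys.executable, 'server.py'])
        if code != 3:
            sys.exit(code)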
860 | class classinstancemethod(object): |
|
860 | class classinstancemethod(object): | |
861 | """ |
|
861 | """ | |
862 | Acts like a class method when called from a class, like an |
|
862 | Acts like a class method when called from a class, like an | |
863 | instance method when called by an instance. The method should |
|
863 | instance method when called by an instance. The method should | |
864 | take two arguments, 'self' and 'cls'; one of these will be None |
|
864 | take two arguments, 'self' and 'cls'; one of these will be None | |
865 | depending on how the method was called. |
|
865 | depending on how the method was called. | |
866 | """ |
|
866 | """ | |
867 |
|
867 | |||
868 | def __init__(self, func): |
|
868 | def __init__(self, func): | |
869 | self.func = func |
|
869 | self.func = func | |
870 | self.__doc__ = func.__doc__ |
|
870 | self.__doc__ = func.__doc__ | |
871 |
|
871 | |||
872 | def __get__(self, obj, type=None): |
|
872 | def __get__(self, obj, type=None): | |
873 | return _methodwrapper(self.func, obj=obj, type=type) |
|
873 | return _methodwrapper(self.func, obj=obj, type=type) | |
874 |
|
874 | |||
875 |
|
875 | |||
876 | class _methodwrapper(object): |
|
876 | class _methodwrapper(object): | |
877 |
|
877 | |||
878 | def __init__(self, func, obj, type): |
|
878 | def __init__(self, func, obj, type): | |
879 | self.func = func |
|
879 | self.func = func | |
880 | self.obj = obj |
|
880 | self.obj = obj | |
881 | self.type = type |
|
881 | self.type = type | |
882 |
|
882 | |||
883 | def __call__(self, *args, **kw): |
|
883 | def __call__(self, *args, **kw): | |
884 | assert 'self' not in kw and 'cls' not in kw, ( |
|
884 | assert 'self' not in kw and 'cls' not in kw, ( | |
885 | "You cannot use 'self' or 'cls' arguments to a " |
|
885 | "You cannot use 'self' or 'cls' arguments to a " | |
886 | "classinstancemethod") |
|
886 | "classinstancemethod") | |
887 | return self.func(*((self.obj, self.type) + args), **kw) |
|
887 | return self.func(*((self.obj, self.type) + args), **kw) | |
888 |
|
888 | |||
889 |
|
889 | |||
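A tiny demonstration of the dual dispatch: exactly one of ``self``/``cls`` is None depending on whether the call goes through the class or an instance (an illustrative sketch, not from this module)::

    class Registry(object):
        def describe(self, cls, label):
            if self is None:
                return 'class call on %s: %s' % (cls.__name__, label)
            return 'instance call: %s' % label
        describe = classinstancemethod(describe)

    Registry.describe('x')    # self is None, cls is Registry
    Registry().describe('x')  # self is the Registry instance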
890 | class Monitor(object): # pragma: no cover |
|
890 | class Monitor(object): # pragma: no cover | |
891 | """ |
|
891 | """ | |
892 | A file monitor and server restarter. |
|
892 | A file monitor and server restarter. | |
893 |
|
893 | |||
894 | Use this like: |
|
894 | Use this like: | |
895 |
|
895 | |||
896 | .. code-block:: python |
|
896 | .. code-block:: python | |
897 |
|
897 | |||
898 | install_reloader() |
|
898 | install_reloader() | |
899 |
|
899 | |||
900 | Then make sure your server is installed with a shell script like:: |
|
900 | Then make sure your server is installed with a shell script like:: | |
901 |
|
901 | |||
902 | err=3 |
|
902 | err=3 | |
903 | while test "$err" -eq 3 ; do |
|
903 | while test "$err" -eq 3 ; do | |
904 | python server.py |
|
904 | python server.py | |
905 | err="$?" |
|
905 | err="$?" | |
906 | done |
|
906 | done | |
907 |
|
907 | |||
908 | or is run from this .bat file (if you use Windows):: |
|
908 | or is run from this .bat file (if you use Windows):: | |
909 |
|
909 | |||
910 | @echo off |
|
910 | @echo off | |
911 | :repeat |
|
911 | :repeat | |
912 | python server.py |
|
912 | python server.py | |
913 | if %errorlevel% == 3 goto repeat |
|
913 | if %errorlevel% == 3 goto repeat | |
914 |
|
914 | |||
915 | or run a monitoring process in Python (``pserve --reload`` does |
|
915 | or run a monitoring process in Python (``pserve --reload`` does | |
916 | this). |
|
916 | this). | |
917 |
|
917 | |||
918 | Use the ``watch_file(filename)`` function to cause a reload/restart for |
|
918 | Use the ``watch_file(filename)`` function to cause a reload/restart for | |
919 | other non-Python files (e.g., configuration files). If you have |
|
919 | other non-Python files (e.g., configuration files). If you have | |
920 | a dynamic set of files that grows over time you can use something like:: |
|
920 | a dynamic set of files that grows over time you can use something like:: | |
921 |
|
921 | |||
922 | def watch_config_files(): |
|
922 | def watch_config_files(): | |
923 | return CONFIG_FILE_CACHE.keys() |
|
923 | return CONFIG_FILE_CACHE.keys() | |
924 | add_file_callback(watch_config_files) |
|
924 | add_file_callback(watch_config_files) | |
925 |
|
925 | |||
926 | Then every time the reloader polls files it will call |
|
926 | Then every time the reloader polls files it will call | |
927 | ``watch_config_files`` and check all the filenames it returns. |
|
927 | ``watch_config_files`` and check all the filenames it returns. | |
928 | """ |
|
928 | """ | |
929 | instances = [] |
|
929 | instances = [] | |
930 | global_extra_files = [] |
|
930 | global_extra_files = [] | |
931 | global_file_callbacks = [] |
|
931 | global_file_callbacks = [] | |
932 |
|
932 | |||
933 | def __init__(self, poll_interval): |
|
933 | def __init__(self, poll_interval): | |
934 | self.module_mtimes = {} |
|
934 | self.module_mtimes = {} | |
935 | self.keep_running = True |
|
935 | self.keep_running = True | |
936 | self.poll_interval = poll_interval |
|
936 | self.poll_interval = poll_interval | |
937 | self.extra_files = list(self.global_extra_files) |
|
937 | self.extra_files = list(self.global_extra_files) | |
938 | self.instances.append(self) |
|
938 | self.instances.append(self) | |
939 | self.file_callbacks = list(self.global_file_callbacks) |
|
939 | self.file_callbacks = list(self.global_file_callbacks) | |
940 |
|
940 | |||
941 | def _exit(self): |
|
941 | def _exit(self): | |
942 | # use os._exit() here and not sys.exit() since within a |
|
942 | # use os._exit() here and not sys.exit() since within a | |
943 | # thread sys.exit() just closes the given thread and |
|
943 | # thread sys.exit() just closes the given thread and | |
944 | # won't kill the process; note os._exit does not call |
|
944 | # won't kill the process; note os._exit does not call | |
945 | # any atexit callbacks, nor does it do finally blocks, |
|
945 | # any atexit callbacks, nor does it do finally blocks, | |
946 | # flush open files, etc. In otherwords, it is rude. |
|
946 | # flush open files, etc. In otherwords, it is rude. | |
947 | os._exit(3) |
|
947 | os._exit(3) | |
948 |
|
948 | |||
949 | def periodic_reload(self): |
|
949 | def periodic_reload(self): | |
950 | while True: |
|
950 | while True: | |
951 | if not self.check_reload(): |
|
951 | if not self.check_reload(): | |
952 | self._exit() |
|
952 | self._exit() | |
953 | break |
|
953 | break | |
954 | time.sleep(self.poll_interval) |
|
954 | time.sleep(self.poll_interval) | |
955 |
|
955 | |||
956 | def check_reload(self): |
|
956 | def check_reload(self): | |
957 | filenames = list(self.extra_files) |
|
957 | filenames = list(self.extra_files) | |
958 | for file_callback in self.file_callbacks: |
|
958 | for file_callback in self.file_callbacks: | |
959 | try: |
|
959 | try: | |
960 | filenames.extend(file_callback()) |
|
960 | filenames.extend(file_callback()) | |
961 | except Exception: |
|
961 | except Exception: | |
962 | print( |
|
962 | print( | |
963 | "Error calling reloader callback %r:" % file_callback) |
|
963 | "Error calling reloader callback %r:" % file_callback) | |
964 | traceback.print_exc() |
|
964 | traceback.print_exc() | |
965 | for module in list(sys.modules.values()): |
|
965 | for module in list(sys.modules.values()): | |
966 | try: |
|
966 | try: | |
967 | filename = module.__file__ |
|
967 | filename = module.__file__ | |
968 | except (AttributeError, ImportError): |
|
968 | except (AttributeError, ImportError): | |
969 | continue |
|
969 | continue | |
970 | if filename is not None: |
|
970 | if filename is not None: | |
971 | filenames.append(filename) |
|
971 | filenames.append(filename) | |
972 |
|
972 | |||
973 | for filename in filenames: |
|
973 | for filename in filenames: | |
974 | try: |
|
974 | try: | |
975 | stat = os.stat(filename) |
|
975 | stat = os.stat(filename) | |
976 | if stat: |
|
976 | if stat: | |
977 | mtime = stat.st_mtime |
|
977 | mtime = stat.st_mtime | |
978 | else: |
|
978 | else: | |
979 | mtime = 0 |
|
979 | mtime = 0 | |
980 | except (OSError, IOError): |
|
980 | except (OSError, IOError): | |
981 | continue |
|
981 | continue | |
982 | if filename.endswith('.pyc') and os.path.exists(filename[:-1]): |
|
982 | if filename.endswith('.pyc') and os.path.exists(filename[:-1]): | |
983 | mtime = max(os.stat(filename[:-1]).st_mtime, mtime) |
|
983 | mtime = max(os.stat(filename[:-1]).st_mtime, mtime) | |
984 | if filename not in self.module_mtimes: |
|
984 | if filename not in self.module_mtimes: | |
985 | self.module_mtimes[filename] = mtime |
|
985 | self.module_mtimes[filename] = mtime | |
986 | elif self.module_mtimes[filename] < mtime: |
|
986 | elif self.module_mtimes[filename] < mtime: | |
987 | print("%s changed; reloading..." % filename) |
|
987 | print("%s changed; reloading..." % filename) | |
988 | run_callback_for_pattern(filename) |
|
988 | run_callback_for_pattern(filename) | |
989 | return False |
|
989 | return False | |
990 | return True |
|
990 | return True | |
991 |
|
991 | |||
992 | def watch_file(self, cls, filename): |
|
992 | def watch_file(self, cls, filename): | |
993 | """Watch the named file for changes""" |
|
993 | """Watch the named file for changes""" | |
994 | filename = os.path.abspath(filename) |
|
994 | filename = os.path.abspath(filename) | |
995 | if self is None: |
|
995 | if self is None: | |
996 | for instance in cls.instances: |
|
996 | for instance in cls.instances: | |
997 | instance.watch_file(filename) |
|
997 | instance.watch_file(filename) | |
998 | cls.global_extra_files.append(filename) |
|
998 | cls.global_extra_files.append(filename) | |
999 | else: |
|
999 | else: | |
1000 | self.extra_files.append(filename) |
|
1000 | self.extra_files.append(filename) | |
1001 |
|
1001 | |||
1002 | watch_file = classinstancemethod(watch_file) |
|
1002 | watch_file = classinstancemethod(watch_file) | |
1003 |
|
1003 | |||
1004 | def add_file_callback(self, cls, callback): |
|
1004 | def add_file_callback(self, cls, callback): | |
1005 | """Add a callback -- a function that takes no parameters -- that will |
|
1005 | """Add a callback -- a function that takes no parameters -- that will | |
1006 | return a list of filenames to watch for changes.""" |
|
1006 | return a list of filenames to watch for changes.""" | |
1007 | if self is None: |
|
1007 | if self is None: | |
1008 | for instance in cls.instances: |
|
1008 | for instance in cls.instances: | |
1009 | instance.add_file_callback(callback) |
|
1009 | instance.add_file_callback(callback) | |
1010 | cls.global_file_callbacks.append(callback) |
|
1010 | cls.global_file_callbacks.append(callback) | |
1011 | else: |
|
1011 | else: | |
1012 | self.file_callbacks.append(callback) |
|
1012 | self.file_callbacks.append(callback) | |
1013 |
|
1013 | |||
1014 | add_file_callback = classinstancemethod(add_file_callback) |
|
1014 | add_file_callback = classinstancemethod(add_file_callback) | |
1015 |
|
1015 | |||
1016 | watch_file = Monitor.watch_file |
|
1016 | watch_file = Monitor.watch_file | |
1017 | add_file_callback = Monitor.add_file_callback |
|
1017 | add_file_callback = Monitor.add_file_callback | |
1018 |
|
1018 | |||
1019 |
|
1019 | |||
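The module-level aliases let application code register watched files without holding a ``Monitor`` reference; thanks to ``classinstancemethod``, calling them through the class records the entries globally and also pushes them into every already-running instance. A usage sketch with hypothetical paths::

    install_reloader(poll_interval=1)

    # Restart when the config file changes, not only Python modules.
    watch_file('production.ini')

    # Watch a dynamic set of files, re-evaluated on every poll.
    def watch_templates():
        return list(TEMPLATE_CACHE.keys())   # hypothetical cache
    add_file_callback(watch_templates)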
1020 | def main(argv=sys.argv, quiet=False): |
|
1020 | def main(argv=sys.argv, quiet=False): | |
1021 | command = RcServerCommand(argv, quiet=quiet) |
|
1021 | command = RcServerCommand(argv, quiet=quiet) | |
1022 | return command.run() |
|
1022 | return command.run() | |
1023 |
|
1023 | |||
1024 | if __name__ == '__main__': # pragma: no cover |
|
1024 | if __name__ == '__main__': # pragma: no cover | |
1025 | sys.exit(main() or 0) |
|
1025 | sys.exit(main() or 0) |
@@ -1,349 +1,349 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Helpers for fixture generation |
|
22 | Helpers for fixture generation | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import os |
|
25 | import os | |
26 | import time |
|
26 | import time | |
27 | import tempfile |
|
27 | import tempfile | |
28 | import shutil |
|
28 | import shutil | |
29 |
|
29 | |||
30 | import configobj |
|
30 | import configobj | |
31 |
|
31 | |||
32 | from rhodecode.tests import * |
|
32 | from rhodecode.tests import * | |
33 | from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap |
|
33 | from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap | |
34 | from rhodecode.model.meta import Session |
|
34 | from rhodecode.model.meta import Session | |
35 | from rhodecode.model.repo import RepoModel |
|
35 | from rhodecode.model.repo import RepoModel | |
36 | from rhodecode.model.user import UserModel |
|
36 | from rhodecode.model.user import UserModel | |
37 | from rhodecode.model.repo_group import RepoGroupModel |
|
37 | from rhodecode.model.repo_group import RepoGroupModel | |
38 | from rhodecode.model.user_group import UserGroupModel |
|
38 | from rhodecode.model.user_group import UserGroupModel | |
39 | from rhodecode.model.gist import GistModel |
|
39 | from rhodecode.model.gist import GistModel | |
40 | from rhodecode.model.auth_token import AuthTokenModel |
|
40 | from rhodecode.model.auth_token import AuthTokenModel | |
41 |
|
41 | |||
42 | dn = os.path.dirname |
|
42 | dn = os.path.dirname | |
43 | FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures') |
|
43 | FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures') | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | def error_function(*args, **kwargs): |
|
46 | def error_function(*args, **kwargs): | |
47 | raise Exception('Total Crash !') |
|
47 | raise Exception('Total Crash !') | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | class TestINI(object): |
|
50 | class TestINI(object): | |
51 | """ |
|
51 | """ | |
52 | Allows creating a new test.ini file as a copy of an existing one with |
|
52 | Allows creating a new test.ini file as a copy of an existing one with | |
53 | edited data. Example usage:: |
|
53 | edited data. Example usage:: | |
54 |
|
54 | |||
55 | with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path: |
|
55 | with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path: | |
56 | print 'paster server %s' % new_test_ini_path |
|
56 | print('paster server %s' % new_test_ini_path) | |
57 | """ |
|
57 | """ | |
58 |
|
58 | |||
59 | def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT', |
|
59 | def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT', | |
60 | destroy=True, dir=None): |
|
60 | destroy=True, dir=None): | |
61 | self.ini_file_path = ini_file_path |
|
61 | self.ini_file_path = ini_file_path | |
62 | self.ini_params = ini_params |
|
62 | self.ini_params = ini_params | |
63 | self.new_path = None |
|
63 | self.new_path = None | |
64 | self.new_path_prefix = new_file_prefix |
|
64 | self.new_path_prefix = new_file_prefix | |
65 | self._destroy = destroy |
|
65 | self._destroy = destroy | |
66 | self._dir = dir |
|
66 | self._dir = dir | |
67 |
|
67 | |||
68 | def __enter__(self): |
|
68 | def __enter__(self): | |
69 | return self.create() |
|
69 | return self.create() | |
70 |
|
70 | |||
71 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
71 | def __exit__(self, exc_type, exc_val, exc_tb): | |
72 | self.destroy() |
|
72 | self.destroy() | |
73 |
|
73 | |||
74 | def create(self): |
|
74 | def create(self): | |
75 | config = configobj.ConfigObj( |
|
75 | config = configobj.ConfigObj( | |
76 | self.ini_file_path, file_error=True, write_empty_values=True) |
|
76 | self.ini_file_path, file_error=True, write_empty_values=True) | |
77 |
|
77 | |||
78 | for data in self.ini_params: |
|
78 | for data in self.ini_params: | |
79 | section, ini_params = data.items()[0] |
|
79 | section, ini_params = data.items()[0] | |
80 | for key, val in ini_params.items(): |
|
80 | for key, val in ini_params.items(): | |
81 | config[section][key] = val |
|
81 | config[section][key] = val | |
82 | with tempfile.NamedTemporaryFile( |
|
82 | with tempfile.NamedTemporaryFile( | |
83 | prefix=self.new_path_prefix, suffix='.ini', dir=self._dir, |
|
83 | prefix=self.new_path_prefix, suffix='.ini', dir=self._dir, | |
84 | delete=False) as new_ini_file: |
|
84 | delete=False) as new_ini_file: | |
85 | config.write(new_ini_file) |
|
85 | config.write(new_ini_file) | |
86 | self.new_path = new_ini_file.name |
|
86 | self.new_path = new_ini_file.name | |
87 |
|
87 | |||
88 | return self.new_path |
|
88 | return self.new_path | |
89 |
|
89 | |||
90 | def destroy(self): |
|
90 | def destroy(self): | |
91 | if self._destroy: |
|
91 | if self._destroy: | |
92 | os.remove(self.new_path) |
|
92 | os.remove(self.new_path) | |
93 |
|
93 | |||
94 |
|
94 | |||
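In practice ``TestINI`` is used to spin up a variant of the test configuration with a few overridden keys; the generated file is removed on exit unless ``destroy=False`` is passed. A usage sketch (section/key names and the ``run_server_with`` helper are hypothetical)::

    overrides = [{'server:main': {'port': '5001'}}]
    with TestINI('test.ini', overrides, new_file_prefix='custom') as new_ini_path:
        run_server_with(new_ini_path)
    # the temporary .ini has been deleted at this point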
95 | class Fixture(object): |
|
95 | class Fixture(object): | |
96 |
|
96 | |||
97 | def anon_access(self, status): |
|
97 | def anon_access(self, status): | |
98 | """ |
|
98 | """ | |
99 | Context manager for disabling anonymous access. Use like: |
|
99 | Context manager for disabling anonymous access. Use like: | |
100 | fixture = Fixture() |
|
100 | fixture = Fixture() | |
101 | with fixture.anon_access(False): |
|
101 | with fixture.anon_access(False): | |
102 | #tests |
|
102 | #tests | |
103 |
|
103 | |||
104 | after this block anon access will be set to `not status` |
|
104 | after this block anon access will be set to `not status` | |
105 | """ |
|
105 | """ | |
106 |
|
106 | |||
107 | class context(object): |
|
107 | class context(object): | |
108 | def __enter__(self): |
|
108 | def __enter__(self): | |
109 | anon = User.get_default_user() |
|
109 | anon = User.get_default_user() | |
110 | anon.active = status |
|
110 | anon.active = status | |
111 | Session().add(anon) |
|
111 | Session().add(anon) | |
112 | Session().commit() |
|
112 | Session().commit() | |
113 | time.sleep(1.5) # must sleep for cache (1s to expire) |
|
113 | time.sleep(1.5) # must sleep for cache (1s to expire) | |
114 |
|
114 | |||
115 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
115 | def __exit__(self, exc_type, exc_val, exc_tb): | |
116 | anon = User.get_default_user() |
|
116 | anon = User.get_default_user() | |
117 | anon.active = not status |
|
117 | anon.active = not status | |
118 | Session().add(anon) |
|
118 | Session().add(anon) | |
119 | Session().commit() |
|
119 | Session().commit() | |
120 |
|
120 | |||
121 | return context() |
|
121 | return context() | |
122 |
|
122 | |||
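The context manager flips the built-in anonymous user's ``active`` flag and sleeps past the one-second cache; note that ``__exit__`` sets the flag to ``not status`` rather than restoring whatever value it had before. A usage sketch (the request assertion is hypothetical)::

    fixture = Fixture()
    with fixture.anon_access(False):
        # anonymous access is disabled inside this block
        app.get(url, status=302)
    # here anonymous access is active again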
123 | def _get_repo_create_params(self, **custom): |
|
123 | def _get_repo_create_params(self, **custom): | |
124 | defs = { |
|
124 | defs = { | |
125 | 'repo_name': None, |
|
125 | 'repo_name': None, | |
126 | 'repo_type': 'hg', |
|
126 | 'repo_type': 'hg', | |
127 | 'clone_uri': '', |
|
127 | 'clone_uri': '', | |
128 | 'push_uri': '', |
|
128 | 'push_uri': '', | |
129 | 'repo_group': '-1', |
|
129 | 'repo_group': '-1', | |
130 | 'repo_description': 'DESC', |
|
130 | 'repo_description': 'DESC', | |
131 | 'repo_private': False, |
|
131 | 'repo_private': False, | |
132 | 'repo_landing_rev': 'rev:tip', |
|
132 | 'repo_landing_rev': 'rev:tip', | |
133 | 'repo_copy_permissions': False, |
|
133 | 'repo_copy_permissions': False, | |
134 | 'repo_state': Repository.STATE_CREATED, |
|
134 | 'repo_state': Repository.STATE_CREATED, | |
135 | } |
|
135 | } | |
136 | defs.update(custom) |
|
136 | defs.update(custom) | |
137 | if 'repo_name_full' not in custom: |
|
137 | if 'repo_name_full' not in custom: | |
138 | defs.update({'repo_name_full': defs['repo_name']}) |
|
138 | defs.update({'repo_name_full': defs['repo_name']}) | |
139 |
|
139 | |||
140 | # fix the repo name if passed as repo_name_full |
|
140 | # fix the repo name if passed as repo_name_full | |
141 | if defs['repo_name']: |
|
141 | if defs['repo_name']: | |
142 | defs['repo_name'] = defs['repo_name'].split('/')[-1] |
|
142 | defs['repo_name'] = defs['repo_name'].split('/')[-1] | |
143 |
|
143 | |||
144 | return defs |
|
144 | return defs | |
145 |
|
145 | |||
146 | def _get_group_create_params(self, **custom): |
|
146 | def _get_group_create_params(self, **custom): | |
147 | defs = { |
|
147 | defs = { | |
148 | 'group_name': None, |
|
148 | 'group_name': None, | |
149 | 'group_description': 'DESC', |
|
149 | 'group_description': 'DESC', | |
150 | 'perm_updates': [], |
|
150 | 'perm_updates': [], | |
151 | 'perm_additions': [], |
|
151 | 'perm_additions': [], | |
152 | 'perm_deletions': [], |
|
152 | 'perm_deletions': [], | |
153 | 'group_parent_id': -1, |
|
153 | 'group_parent_id': -1, | |
154 | 'enable_locking': False, |
|
154 | 'enable_locking': False, | |
155 | 'recursive': False, |
|
155 | 'recursive': False, | |
156 | } |
|
156 | } | |
157 | defs.update(custom) |
|
157 | defs.update(custom) | |
158 |
|
158 | |||
159 | return defs |
|
159 | return defs | |
160 |
|
160 | |||
161 | def _get_user_create_params(self, name, **custom): |
|
161 | def _get_user_create_params(self, name, **custom): | |
162 | defs = { |
|
162 | defs = { | |
163 | 'username': name, |
|
163 | 'username': name, | |
164 | 'password': 'qweqwe', |
|
164 | 'password': 'qweqwe', | |
165 | 'email': '%s+test@rhodecode.org' % name, |
|
165 | 'email': '%s+test@rhodecode.org' % name, | |
166 | 'firstname': 'TestUser', |
|
166 | 'firstname': 'TestUser', | |
167 | 'lastname': 'Test', |
|
167 | 'lastname': 'Test', | |
168 | 'active': True, |
|
168 | 'active': True, | |
169 | 'admin': False, |
|
169 | 'admin': False, | |
170 | 'extern_type': 'rhodecode', |
|
170 | 'extern_type': 'rhodecode', | |
171 | 'extern_name': None, |
|
171 | 'extern_name': None, | |
172 | } |
|
172 | } | |
173 | defs.update(custom) |
|
173 | defs.update(custom) | |
174 |
|
174 | |||
175 | return defs |
|
175 | return defs | |
176 |
|
176 | |||
177 | def _get_user_group_create_params(self, name, **custom): |
|
177 | def _get_user_group_create_params(self, name, **custom): | |
178 | defs = { |
|
178 | defs = { | |
179 | 'users_group_name': name, |
|
179 | 'users_group_name': name, | |
180 | 'user_group_description': 'DESC', |
|
180 | 'user_group_description': 'DESC', | |
181 | 'users_group_active': True, |
|
181 | 'users_group_active': True, | |
182 | 'user_group_data': {}, |
|
182 | 'user_group_data': {}, | |
183 | } |
|
183 | } | |
184 | defs.update(custom) |
|
184 | defs.update(custom) | |
185 |
|
185 | |||
186 | return defs |
|
186 | return defs | |
187 |
|
187 | |||
188 | def create_repo(self, name, **kwargs): |
|
188 | def create_repo(self, name, **kwargs): | |
189 | repo_group = kwargs.get('repo_group') |
|
189 | repo_group = kwargs.get('repo_group') | |
190 | if isinstance(repo_group, RepoGroup): |
|
190 | if isinstance(repo_group, RepoGroup): | |
191 | kwargs['repo_group'] = repo_group.group_id |
|
191 | kwargs['repo_group'] = repo_group.group_id | |
192 | name = name.split(Repository.NAME_SEP)[-1] |
|
192 | name = name.split(Repository.NAME_SEP)[-1] | |
193 | name = Repository.NAME_SEP.join((repo_group.group_name, name)) |
|
193 | name = Repository.NAME_SEP.join((repo_group.group_name, name)) | |
194 |
|
194 | |||
195 | if 'skip_if_exists' in kwargs: |
|
195 | if 'skip_if_exists' in kwargs: | |
196 | del kwargs['skip_if_exists'] |
|
196 | del kwargs['skip_if_exists'] | |
197 | r = Repository.get_by_repo_name(name) |
|
197 | r = Repository.get_by_repo_name(name) | |
198 | if r: |
|
198 | if r: | |
199 | return r |
|
199 | return r | |
200 |
|
200 | |||
201 | form_data = self._get_repo_create_params(repo_name=name, **kwargs) |
|
201 | form_data = self._get_repo_create_params(repo_name=name, **kwargs) | |
202 | cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
202 | cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
203 | RepoModel().create(form_data, cur_user) |
|
203 | RepoModel().create(form_data, cur_user) | |
204 | Session().commit() |
|
204 | Session().commit() | |
205 | repo = Repository.get_by_repo_name(name) |
|
205 | repo = Repository.get_by_repo_name(name) | |
206 | assert repo |
|
206 | assert repo | |
207 | return repo |
|
207 | return repo | |
208 |
|
208 | |||
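A typical pattern chains these helpers: create a group, create a repository inside it via ``repo_group``, then tear both down. A sketch with hypothetical names::

    fixture = Fixture()
    group = fixture.create_repo_group('test-group', skip_if_exists=True)
    repo = fixture.create_repo(
        'test-repo', repo_type='git', repo_group=group, skip_if_exists=True)
    try:
        assert repo.repo_name == 'test-group/test-repo'
    finally:
        fixture.destroy_repo(repo.repo_name)
        fixture.destroy_repo_group(group.group_id)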
209 | def create_fork(self, repo_to_fork, fork_name, **kwargs): |
|
209 | def create_fork(self, repo_to_fork, fork_name, **kwargs): | |
210 | repo_to_fork = Repository.get_by_repo_name(repo_to_fork) |
|
210 | repo_to_fork = Repository.get_by_repo_name(repo_to_fork) | |
211 |
|
211 | |||
212 | form_data = self._get_repo_create_params(repo_name=fork_name, |
|
212 | form_data = self._get_repo_create_params(repo_name=fork_name, | |
213 | fork_parent_id=repo_to_fork.repo_id, |
|
213 | fork_parent_id=repo_to_fork.repo_id, | |
214 | repo_type=repo_to_fork.repo_type, |
|
214 | repo_type=repo_to_fork.repo_type, | |
215 | **kwargs) |
|
215 | **kwargs) | |
216 | #TODO: fix it !! |
|
216 | #TODO: fix it !! | |
217 | form_data['description'] = form_data['repo_description'] |
|
217 | form_data['description'] = form_data['repo_description'] | |
218 | form_data['private'] = form_data['repo_private'] |
|
218 | form_data['private'] = form_data['repo_private'] | |
219 | form_data['landing_rev'] = form_data['repo_landing_rev'] |
|
219 | form_data['landing_rev'] = form_data['repo_landing_rev'] | |
220 |
|
220 | |||
221 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
221 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
222 | RepoModel().create_fork(form_data, cur_user=owner) |
|
222 | RepoModel().create_fork(form_data, cur_user=owner) | |
223 | Session().commit() |
|
223 | Session().commit() | |
224 | r = Repository.get_by_repo_name(fork_name) |
|
224 | r = Repository.get_by_repo_name(fork_name) | |
225 | assert r |
|
225 | assert r | |
226 | return r |
|
226 | return r | |
227 |
|
227 | |||
228 | def destroy_repo(self, repo_name, **kwargs): |
|
228 | def destroy_repo(self, repo_name, **kwargs): | |
229 | RepoModel().delete(repo_name, **kwargs) |
|
229 | RepoModel().delete(repo_name, **kwargs) | |
230 | Session().commit() |
|
230 | Session().commit() | |
231 |
|
231 | |||
232 | def destroy_repo_on_filesystem(self, repo_name): |
|
232 | def destroy_repo_on_filesystem(self, repo_name): | |
233 | rm_path = os.path.join(RepoModel().repos_path, repo_name) |
|
233 | rm_path = os.path.join(RepoModel().repos_path, repo_name) | |
234 | if os.path.isdir(rm_path): |
|
234 | if os.path.isdir(rm_path): | |
235 | shutil.rmtree(rm_path) |
|
235 | shutil.rmtree(rm_path) | |
236 |
|
236 | |||
237 | def create_repo_group(self, name, **kwargs): |
|
237 | def create_repo_group(self, name, **kwargs): | |
238 | if 'skip_if_exists' in kwargs: |
|
238 | if 'skip_if_exists' in kwargs: | |
239 | del kwargs['skip_if_exists'] |
|
239 | del kwargs['skip_if_exists'] | |
240 | gr = RepoGroup.get_by_group_name(group_name=name) |
|
240 | gr = RepoGroup.get_by_group_name(group_name=name) | |
241 | if gr: |
|
241 | if gr: | |
242 | return gr |
|
242 | return gr | |
243 | form_data = self._get_group_create_params(group_name=name, **kwargs) |
|
243 | form_data = self._get_group_create_params(group_name=name, **kwargs) | |
244 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
244 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
245 | gr = RepoGroupModel().create( |
|
245 | gr = RepoGroupModel().create( | |
246 | group_name=form_data['group_name'], |
|
246 | group_name=form_data['group_name'], | |
247 | group_description=form_data['group_description'], |
|
247 | group_description=form_data['group_description'], | |
248 | owner=owner) |
|
248 | owner=owner) | |
249 | Session().commit() |
|
249 | Session().commit() | |
250 | gr = RepoGroup.get_by_group_name(gr.group_name) |
|
250 | gr = RepoGroup.get_by_group_name(gr.group_name) | |
251 | return gr |
|
251 | return gr | |
252 |
|
252 | |||
253 | def destroy_repo_group(self, repogroupid): |
|
253 | def destroy_repo_group(self, repogroupid): | |
254 | RepoGroupModel().delete(repogroupid) |
|
254 | RepoGroupModel().delete(repogroupid) | |
255 | Session().commit() |
|
255 | Session().commit() | |
256 |
|
256 | |||
257 | def create_user(self, name, **kwargs): |
|
257 | def create_user(self, name, **kwargs): | |
258 | if 'skip_if_exists' in kwargs: |
|
258 | if 'skip_if_exists' in kwargs: | |
259 | del kwargs['skip_if_exists'] |
|
259 | del kwargs['skip_if_exists'] | |
260 | user = User.get_by_username(name) |
|
260 | user = User.get_by_username(name) | |
261 | if user: |
|
261 | if user: | |
262 | return user |
|
262 | return user | |
263 | form_data = self._get_user_create_params(name, **kwargs) |
|
263 | form_data = self._get_user_create_params(name, **kwargs) | |
264 | user = UserModel().create(form_data) |
|
264 | user = UserModel().create(form_data) | |
265 |
|
265 | |||
266 | # create token for user |
|
266 | # create token for user | |
267 | AuthTokenModel().create( |
|
267 | AuthTokenModel().create( | |
268 | user=user, description=u'TEST_USER_TOKEN') |
|
268 | user=user, description=u'TEST_USER_TOKEN') | |
269 |
|
269 | |||
270 | Session().commit() |
|
270 | Session().commit() | |
271 | user = User.get_by_username(user.username) |
|
271 | user = User.get_by_username(user.username) | |
272 | return user |
|
272 | return user | |
273 |
|
273 | |||
274 | def destroy_user(self, userid): |
|
274 | def destroy_user(self, userid): | |
275 | UserModel().delete(userid) |
|
275 | UserModel().delete(userid) | |
276 | Session().commit() |
|
276 | Session().commit() | |
277 |
|
277 | |||
278 | def create_additional_user_email(self, user, email): |
|
278 | def create_additional_user_email(self, user, email): | |
279 | uem = UserEmailMap() |
|
279 | uem = UserEmailMap() | |
280 | uem.user = user |
|
280 | uem.user = user | |
281 | uem.email = email |
|
281 | uem.email = email | |
282 | Session().add(uem) |
|
282 | Session().add(uem) | |
283 | return uem |
|
283 | return uem | |
284 |
|
284 | |||
285 | def destroy_users(self, userid_iter): |
|
285 | def destroy_users(self, userid_iter): | |
286 | for user_id in userid_iter: |
|
286 | for user_id in userid_iter: | |
287 | if User.get_by_username(user_id): |
|
287 | if User.get_by_username(user_id): | |
288 | UserModel().delete(user_id) |
|
288 | UserModel().delete(user_id) | |
289 | Session().commit() |
|
289 | Session().commit() | |
290 |
|
290 | |||
291 | def create_user_group(self, name, **kwargs): |
|
291 | def create_user_group(self, name, **kwargs): | |
292 | if 'skip_if_exists' in kwargs: |
|
292 | if 'skip_if_exists' in kwargs: | |
293 | del kwargs['skip_if_exists'] |
|
293 | del kwargs['skip_if_exists'] | |
294 | gr = UserGroup.get_by_group_name(group_name=name) |
|
294 | gr = UserGroup.get_by_group_name(group_name=name) | |
295 | if gr: |
|
295 | if gr: | |
296 | return gr |
|
296 | return gr | |
297 | # map active flag to the real attribute. For API consistency of fixtures |
|
297 | # map active flag to the real attribute. For API consistency of fixtures | |
298 | if 'active' in kwargs: |
|
298 | if 'active' in kwargs: | |
299 | kwargs['users_group_active'] = kwargs['active'] |
|
299 | kwargs['users_group_active'] = kwargs['active'] | |
300 | del kwargs['active'] |
|
300 | del kwargs['active'] | |
301 | form_data = self._get_user_group_create_params(name, **kwargs) |
|
301 | form_data = self._get_user_group_create_params(name, **kwargs) | |
302 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
302 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
303 | user_group = UserGroupModel().create( |
|
303 | user_group = UserGroupModel().create( | |
304 | name=form_data['users_group_name'], |
|
304 | name=form_data['users_group_name'], | |
305 | description=form_data['user_group_description'], |
|
305 | description=form_data['user_group_description'], | |
306 | owner=owner, active=form_data['users_group_active'], |
|
306 | owner=owner, active=form_data['users_group_active'], | |
307 | group_data=form_data['user_group_data']) |
|
307 | group_data=form_data['user_group_data']) | |
308 | Session().commit() |
|
308 | Session().commit() | |
309 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) |
|
309 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) | |
310 | return user_group |
|
310 | return user_group | |
311 |
|
311 | |||
312 | def destroy_user_group(self, usergroupid): |
|
312 | def destroy_user_group(self, usergroupid): | |
313 | UserGroupModel().delete(user_group=usergroupid, force=True) |
|
313 | UserGroupModel().delete(user_group=usergroupid, force=True) | |
314 | Session().commit() |
|
314 | Session().commit() | |
315 |
|
315 | |||
316 | def create_gist(self, **kwargs): |
|
316 | def create_gist(self, **kwargs): | |
317 | form_data = { |
|
317 | form_data = { | |
318 | 'description': 'new-gist', |
|
318 | 'description': 'new-gist', | |
319 | 'owner': TEST_USER_ADMIN_LOGIN, |
|
319 | 'owner': TEST_USER_ADMIN_LOGIN, | |
320 | 'gist_type': GistModel.cls.GIST_PUBLIC, |
|
320 | 'gist_type': GistModel.cls.GIST_PUBLIC, | |
321 | 'lifetime': -1, |
|
321 | 'lifetime': -1, | |
322 | 'acl_level': Gist.ACL_LEVEL_PUBLIC, |
|
322 | 'acl_level': Gist.ACL_LEVEL_PUBLIC, | |
323 | 'gist_mapping': {'filename1.txt': {'content': 'hello world'},} |
|
323 | 'gist_mapping': {'filename1.txt': {'content': 'hello world'},} | |
324 | } |
|
324 | } | |
325 | form_data.update(kwargs) |
|
325 | form_data.update(kwargs) | |
326 | gist = GistModel().create( |
|
326 | gist = GistModel().create( | |
327 | description=form_data['description'], owner=form_data['owner'], |
|
327 | description=form_data['description'], owner=form_data['owner'], | |
328 | gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], |
|
328 | gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], | |
329 | lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] |
|
329 | lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] | |
330 | ) |
|
330 | ) | |
331 | Session().commit() |
|
331 | Session().commit() | |
332 | return gist |
|
332 | return gist | |
333 |
|
333 | |||
334 | def destroy_gists(self, gistid=None): |
|
334 | def destroy_gists(self, gistid=None): | |
335 | for g in GistModel.cls.get_all(): |
|
335 | for g in GistModel.cls.get_all(): | |
336 | if gistid: |
|
336 | if gistid: | |
337 | if gistid == g.gist_access_id: |
|
337 | if gistid == g.gist_access_id: | |
338 | GistModel().delete(g) |
|
338 | GistModel().delete(g) | |
339 | else: |
|
339 | else: | |
340 | GistModel().delete(g) |
|
340 | GistModel().delete(g) | |
341 | Session().commit() |
|
341 | Session().commit() | |
342 |
|
342 | |||
343 | def load_resource(self, resource_name, strip=False): |
|
343 | def load_resource(self, resource_name, strip=False): | |
344 | with open(os.path.join(FIXTURES, resource_name)) as f: |
|
344 | with open(os.path.join(FIXTURES, resource_name)) as f: | |
345 | source = f.read() |
|
345 | source = f.read() | |
346 | if strip: |
|
346 | if strip: | |
347 | source = source.strip() |
|
347 | source = source.strip() | |
348 |
|
348 | |||
349 | return source |
|
349 | return source |
@@ -1,201 +1,201 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from StringIO import StringIO |
|
21 | from StringIO import StringIO | |
22 |
|
22 | |||
23 | import pytest |
|
23 | import pytest | |
24 | from mock import patch, Mock |
|
24 | from mock import patch, Mock | |
25 |
|
25 | |||
26 | from rhodecode.lib.middleware.simplesvn import SimpleSvn, SimpleSvnApp |
|
26 | from rhodecode.lib.middleware.simplesvn import SimpleSvn, SimpleSvnApp | |
27 | from rhodecode.lib.utils import get_rhodecode_base_path |
|
27 | from rhodecode.lib.utils import get_rhodecode_base_path | |
28 |
|
28 | |||
29 |
|
29 | |||
30 | class TestSimpleSvn(object): |
|
30 | class TestSimpleSvn(object): | |
31 | @pytest.fixture(autouse=True) |
|
31 | @pytest.fixture(autouse=True) | |
32 | def simple_svn(self, baseapp, request_stub): |
|
32 | def simple_svn(self, baseapp, request_stub): | |
33 | base_path = get_rhodecode_base_path() |
|
33 | base_path = get_rhodecode_base_path() | |
34 | self.app = SimpleSvn( |
|
34 | self.app = SimpleSvn( | |
35 | config={'auth_ret_code': '', 'base_path': base_path}, |
|
35 | config={'auth_ret_code': '', 'base_path': base_path}, | |
36 | registry=request_stub.registry) |
|
36 | registry=request_stub.registry) | |
37 |
|
37 | |||
38 | def test_get_config(self): |
|
38 | def test_get_config(self): | |
39 | extras = {'foo': 'FOO', 'bar': 'BAR'} |
|
39 | extras = {'foo': 'FOO', 'bar': 'BAR'} | |
40 | config = self.app._create_config(extras, repo_name='test-repo') |
|
40 | config = self.app._create_config(extras, repo_name='test-repo') | |
41 | assert config == extras |
|
41 | assert config == extras | |
42 |
|
42 | |||
43 | @pytest.mark.parametrize( |
|
43 | @pytest.mark.parametrize( | |
44 | 'method', ['OPTIONS', 'PROPFIND', 'GET', 'REPORT']) |
|
44 | 'method', ['OPTIONS', 'PROPFIND', 'GET', 'REPORT']) | |
45 | def test_get_action_returns_pull(self, method): |
|
45 | def test_get_action_returns_pull(self, method): | |
46 | environment = {'REQUEST_METHOD': method} |
|
46 | environment = {'REQUEST_METHOD': method} | |
47 | action = self.app._get_action(environment) |
|
47 | action = self.app._get_action(environment) | |
48 | assert action == 'pull' |
|
48 | assert action == 'pull' | |
49 |
|
49 | |||
50 | @pytest.mark.parametrize( |
|
50 | @pytest.mark.parametrize( | |
51 | 'method', [ |
|
51 | 'method', [ | |
52 | 'MKACTIVITY', 'PROPPATCH', 'PUT', 'CHECKOUT', 'MKCOL', 'MOVE', |
|
52 | 'MKACTIVITY', 'PROPPATCH', 'PUT', 'CHECKOUT', 'MKCOL', 'MOVE', | |
53 | 'COPY', 'DELETE', 'LOCK', 'UNLOCK', 'MERGE' |
|
53 | 'COPY', 'DELETE', 'LOCK', 'UNLOCK', 'MERGE' | |
54 | ]) |
|
54 | ]) | |
55 | def test_get_action_returns_push(self, method): |
|
55 | def test_get_action_returns_push(self, method): | |
56 | environment = {'REQUEST_METHOD': method} |
|
56 | environment = {'REQUEST_METHOD': method} | |
57 | action = self.app._get_action(environment) |
|
57 | action = self.app._get_action(environment) | |
58 | assert action == 'push' |
|
58 | assert action == 'push' | |
59 |
|
59 | |||
60 | @pytest.mark.parametrize( |
|
60 | @pytest.mark.parametrize( | |
61 | 'path, expected_name', [ |
|
61 | 'path, expected_name', [ | |
62 | ('/hello-svn', 'hello-svn'), |
|
62 | ('/hello-svn', 'hello-svn'), | |
63 | ('/hello-svn/', 'hello-svn'), |
|
63 | ('/hello-svn/', 'hello-svn'), | |
64 | ('/group/hello-svn/', 'group/hello-svn'), |
|
64 | ('/group/hello-svn/', 'group/hello-svn'), | |
65 | ('/group/hello-svn/!svn/vcc/default', 'group/hello-svn'), |
|
65 | ('/group/hello-svn/!svn/vcc/default', 'group/hello-svn'), | |
66 | ]) |
|
66 | ]) | |
67 | def test_get_repository_name(self, path, expected_name): |
|
67 | def test_get_repository_name(self, path, expected_name): | |
68 | environment = {'PATH_INFO': path} |
|
68 | environment = {'PATH_INFO': path} | |
69 | name = self.app._get_repository_name(environment) |
|
69 | name = self.app._get_repository_name(environment) | |
70 | assert name == expected_name |
|
70 | assert name == expected_name | |
71 |
|
71 | |||
72 | def test_get_repository_name_subfolder(self, backend_svn): |
|
72 | def test_get_repository_name_subfolder(self, backend_svn): | |
73 | repo = backend_svn.repo |
|
73 | repo = backend_svn.repo | |
74 | environment = { |
|
74 | environment = { | |
75 | 'PATH_INFO': '/{}/path/with/subfolders'.format(repo.repo_name)} |
|
75 | 'PATH_INFO': '/{}/path/with/subfolders'.format(repo.repo_name)} | |
76 | name = self.app._get_repository_name(environment) |
|
76 | name = self.app._get_repository_name(environment) | |
77 | assert name == repo.repo_name |
|
77 | assert name == repo.repo_name | |
78 |
|
78 | |||
79 | def test_create_wsgi_app(self): |
|
79 | def test_create_wsgi_app(self): | |
80 | with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method: |
|
80 | with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method: | |
81 | mock_method.return_value = False |
|
81 | mock_method.return_value = False | |
82 | with patch('rhodecode.lib.middleware.simplesvn.DisabledSimpleSvnApp') as ( |
|
82 | with patch('rhodecode.lib.middleware.simplesvn.DisabledSimpleSvnApp') as ( | |
83 | wsgi_app_mock): |
|
83 | wsgi_app_mock): | |
84 | config = Mock() |
|
84 | config = Mock() | |
85 | wsgi_app = self.app._create_wsgi_app( |
|
85 | wsgi_app = self.app._create_wsgi_app( | |
86 | repo_path='', repo_name='', config=config) |
|
86 | repo_path='', repo_name='', config=config) | |
87 |
|
87 | |||
88 | wsgi_app_mock.assert_called_once_with(config) |
|
88 | wsgi_app_mock.assert_called_once_with(config) | |
89 | assert wsgi_app == wsgi_app_mock() |
|
89 | assert wsgi_app == wsgi_app_mock() | |
90 |
|
90 | |||
91 | def test_create_wsgi_app_when_enabled(self): |
|
91 | def test_create_wsgi_app_when_enabled(self): | |
92 | with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method: |
|
92 | with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method: | |
93 | mock_method.return_value = True |
|
93 | mock_method.return_value = True | |
94 | with patch('rhodecode.lib.middleware.simplesvn.SimpleSvnApp') as ( |
|
94 | with patch('rhodecode.lib.middleware.simplesvn.SimpleSvnApp') as ( | |
95 | wsgi_app_mock): |
|
95 | wsgi_app_mock): | |
96 | config = Mock() |
|
96 | config = Mock() | |
97 | wsgi_app = self.app._create_wsgi_app( |
|
97 | wsgi_app = self.app._create_wsgi_app( | |
98 | repo_path='', repo_name='', config=config) |
|
98 | repo_path='', repo_name='', config=config) | |
99 |
|
99 | |||
100 | wsgi_app_mock.assert_called_once_with(config) |
|
100 | wsgi_app_mock.assert_called_once_with(config) | |
101 | assert wsgi_app == wsgi_app_mock() |
|
101 | assert wsgi_app == wsgi_app_mock() | |
102 |
|
102 | |||
103 |
|
103 | |||
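The two parametrized ``test_get_action_*`` tests above pin down SimpleSvn's read/write split over WebDAV verbs; conceptually ``_get_action`` reduces to a membership test like the sketch below (a paraphrase of the behaviour under test, not the middleware's actual implementation)::

    READ_ONLY_METHODS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT')

    def get_action(environ):
        # Safe methods map to 'pull'; every mutating DAV verb is a 'push'.
        if environ['REQUEST_METHOD'] in READ_ONLY_METHODS:
            return 'pull'
        return 'push'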
104 | class TestSimpleSvnApp(object): |
|
104 | class TestSimpleSvnApp(object): | |
105 | data = '<xml></xml>' |
|
105 | data = '<xml></xml>' | |
106 | path = '/group/my-repo' |
|
106 | path = '/group/my-repo' | |
107 | wsgi_input = StringIO(data) |
|
107 | wsgi_input = StringIO(data) | |
108 | environment = { |
|
108 | environment = { | |
109 | 'HTTP_DAV': ( |
|
109 | 'HTTP_DAV': ( | |
110 | 'http://subversion.tigris.org/xmlns/dav/svn/depth,' |
|
110 | 'http://subversion.tigris.org/xmlns/dav/svn/depth,' | |
111 | ' http://subversion.tigris.org/xmlns/dav/svn/mergeinfo'), |
|
111 | ' http://subversion.tigris.org/xmlns/dav/svn/mergeinfo'), | |
112 | 'HTTP_USER_AGENT': 'SVN/1.8.11 (x86_64-linux) serf/1.3.8', |
|
112 | 'HTTP_USER_AGENT': 'SVN/1.8.11 (x86_64-linux) serf/1.3.8', | |
113 | 'REQUEST_METHOD': 'OPTIONS', |
|
113 | 'REQUEST_METHOD': 'OPTIONS', | |
114 | 'PATH_INFO': path, |
|
114 | 'PATH_INFO': path, | |
115 | 'wsgi.input': wsgi_input, |
|
115 | 'wsgi.input': wsgi_input, | |
116 | 'CONTENT_TYPE': 'text/xml', |
|
116 | 'CONTENT_TYPE': 'text/xml', | |
117 | 'CONTENT_LENGTH': '130' |
|
117 | 'CONTENT_LENGTH': '130' | |
118 | } |
|
118 | } | |
119 |
|
119 | |||
120 | def setup_method(self, method): |
|
120 | def setup_method(self, method): | |
121 | self.host = 'http://localhost/' |
|
121 | self.host = 'http://localhost/' | |
122 | base_path = get_rhodecode_base_path() |
|
122 | base_path = get_rhodecode_base_path() | |
123 | self.app = SimpleSvnApp( |
|
123 | self.app = SimpleSvnApp( | |
124 | config={'subversion_http_server_url': self.host, |
|
124 | config={'subversion_http_server_url': self.host, | |
125 | 'base_path': base_path}) |
|
125 | 'base_path': base_path}) | |
126 |
|
126 | |||
127 | def test_get_request_headers_with_content_type(self): |
|
127 | def test_get_request_headers_with_content_type(self): | |
128 | expected_headers = { |
|
128 | expected_headers = { | |
129 | 'Dav': self.environment['HTTP_DAV'], |
|
129 | 'Dav': self.environment['HTTP_DAV'], | |
130 | 'User-Agent': self.environment['HTTP_USER_AGENT'], |
|
130 | 'User-Agent': self.environment['HTTP_USER_AGENT'], | |
131 | 'Content-Type': self.environment['CONTENT_TYPE'], |
|
131 | 'Content-Type': self.environment['CONTENT_TYPE'], | |
132 | 'Content-Length': self.environment['CONTENT_LENGTH'] |
|
132 | 'Content-Length': self.environment['CONTENT_LENGTH'] | |
133 | } |
|
133 | } | |
134 | headers = self.app._get_request_headers(self.environment) |
|
134 | headers = self.app._get_request_headers(self.environment) | |
135 | assert headers == expected_headers |
|
135 | assert headers == expected_headers | |
136 |
|
136 | |||
137 | def test_get_request_headers_without_content_type(self): |
|
137 | def test_get_request_headers_without_content_type(self): | |
138 | environment = self.environment.copy() |
|
138 | environment = self.environment.copy() | |
139 | environment.pop('CONTENT_TYPE') |
|
139 | environment.pop('CONTENT_TYPE') | |
140 | expected_headers = { |
|
140 | expected_headers = { | |
141 | 'Dav': environment['HTTP_DAV'], |
|
141 | 'Dav': environment['HTTP_DAV'], | |
142 | 'Content-Length': self.environment['CONTENT_LENGTH'], |
|
142 | 'Content-Length': self.environment['CONTENT_LENGTH'], | |
143 | 'User-Agent': environment['HTTP_USER_AGENT'], |
|
143 | 'User-Agent': environment['HTTP_USER_AGENT'], | |
144 | } |
|
144 | } | |
145 | request_headers = self.app._get_request_headers(environment) |
|
145 | request_headers = self.app._get_request_headers(environment) | |
146 | assert request_headers == expected_headers |
|
146 | assert request_headers == expected_headers | |
147 |
|
147 | |||
148 | def test_get_response_headers(self): |
|
148 | def test_get_response_headers(self): | |
149 | headers = { |
|
149 | headers = { | |
150 | 'Connection': 'keep-alive', |
|
150 | 'Connection': 'keep-alive', | |
151 | 'Keep-Alive': 'timeout=5, max=100', |
|
151 | 'Keep-Alive': 'timeout=5, max=100', | |
152 | 'Transfer-Encoding': 'chunked', |
|
152 | 'Transfer-Encoding': 'chunked', | |
153 | 'Content-Encoding': 'gzip', |
|
153 | 'Content-Encoding': 'gzip', | |
154 | 'MS-Author-Via': 'DAV', |
|
154 | 'MS-Author-Via': 'DAV', | |
155 | 'SVN-Supported-Posts': 'create-txn-with-props' |
|
155 | 'SVN-Supported-Posts': 'create-txn-with-props' | |
156 | } |
|
156 | } | |
157 | expected_headers = [ |
|
157 | expected_headers = [ | |
158 | ('MS-Author-Via', 'DAV'), |
|
158 | ('MS-Author-Via', 'DAV'), | |
159 | ('SVN-Supported-Posts', 'create-txn-with-props'), |
|
159 | ('SVN-Supported-Posts', 'create-txn-with-props'), | |
160 | ] |
|
160 | ] | |
161 | response_headers = self.app._get_response_headers(headers) |
|
161 | response_headers = self.app._get_response_headers(headers) | |
162 | assert sorted(response_headers) == sorted(expected_headers) |
|
162 | assert sorted(response_headers) == sorted(expected_headers) | |
163 |
|
163 | |||
164 | def test_get_url(self): |
|
164 | def test_get_url(self): | |
165 | url = self.app._get_url(self.path) |
|
165 | url = self.app._get_url(self.path) | |
166 | expected_url = '{}{}'.format(self.host.strip('/'), self.path) |
|
166 | expected_url = '{}{}'.format(self.host.strip('/'), self.path) | |
167 | assert url == expected_url |
|
167 | assert url == expected_url | |
168 |
|
168 | |||
169 | def test_call(self): |
|
169 | def test_call(self): | |
170 | start_response = Mock() |
|
170 | start_response = Mock() | |
171 | response_mock = Mock() |
|
171 | response_mock = Mock() | |
172 | response_mock.headers = { |
|
172 | response_mock.headers = { | |
173 | 'Content-Encoding': 'gzip', |
|
173 | 'Content-Encoding': 'gzip', | |
174 | 'MS-Author-Via': 'DAV', |
|
174 | 'MS-Author-Via': 'DAV', | |
175 | 'SVN-Supported-Posts': 'create-txn-with-props' |
|
175 | 'SVN-Supported-Posts': 'create-txn-with-props' | |
176 | } |
|
176 | } | |
177 | response_mock.status_code = 200 |
|
177 | response_mock.status_code = 200 | |
178 | response_mock.reason = 'OK' |
|
178 | response_mock.reason = 'OK' | |
179 | with patch('rhodecode.lib.middleware.simplesvn.requests.request') as ( |
|
179 | with patch('rhodecode.lib.middleware.simplesvn.requests.request') as ( | |
180 | request_mock): |
|
180 | request_mock): | |
181 | request_mock.return_value = response_mock |
|
181 | request_mock.return_value = response_mock | |
182 | self.app(self.environment, start_response) |
|
182 | self.app(self.environment, start_response) | |
183 |
|
183 | |||
184 | expected_url = '{}{}'.format(self.host.strip('/'), self.path) |
|
184 | expected_url = '{}{}'.format(self.host.strip('/'), self.path) | |
185 | expected_request_headers = { |
|
185 | expected_request_headers = { | |
186 | 'Dav': self.environment['HTTP_DAV'], |
|
186 | 'Dav': self.environment['HTTP_DAV'], | |
187 | 'User-Agent': self.environment['HTTP_USER_AGENT'], |
|
187 | 'User-Agent': self.environment['HTTP_USER_AGENT'], | |
188 | 'Content-Type': self.environment['CONTENT_TYPE'], |
|
188 | 'Content-Type': self.environment['CONTENT_TYPE'], | |
189 | 'Content-Length': self.environment['CONTENT_LENGTH'] |
|
189 | 'Content-Length': self.environment['CONTENT_LENGTH'] | |
190 | } |
|
190 | } | |
191 | expected_response_headers = [ |
|
191 | expected_response_headers = [ | |
192 | ('SVN-Supported-Posts', 'create-txn-with-props'), |
|
192 | ('SVN-Supported-Posts', 'create-txn-with-props'), | |
193 | ('MS-Author-Via', 'DAV'), |
|
193 | ('MS-Author-Via', 'DAV'), | |
194 | ] |
|
194 | ] | |
195 | request_mock.assert_called_once_with( |
|
195 | request_mock.assert_called_once_with( | |
196 | self.environment['REQUEST_METHOD'], expected_url, |
|
196 | self.environment['REQUEST_METHOD'], expected_url, | |
197 | data=self.data, headers=expected_request_headers) |
|
197 | data=self.data, headers=expected_request_headers, stream=False) | |
198 | response_mock.iter_content.assert_called_once_with(chunk_size=1024) |
|
198 | response_mock.iter_content.assert_called_once_with(chunk_size=1024) | |
199 | args, _ = start_response.call_args |
|
199 | args, _ = start_response.call_args | |
200 | assert args[0] == '200 OK' |
|
200 | assert args[0] == '200 OK' | |
201 | assert sorted(args[1]) == sorted(expected_response_headers) |
|
201 | assert sorted(args[1]) == sorted(expected_response_headers) |
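The response-header assertions above hinge on the proxy stripping hop-by-hop headers (Connection, Keep-Alive, Transfer-Encoding, Content-Encoding) before replaying the upstream SVN response to the WSGI client. A minimal sketch of that filtering, inferred from the expected output of test_get_response_headers rather than taken from SimpleSvnApp itself:

# Hypothetical reference for the behaviour the test pins down; the real
# _get_response_headers in SimpleSvnApp may use a different drop list.
HOP_BY_HOP = {'connection', 'keep-alive', 'transfer-encoding', 'content-encoding'}

def get_response_headers(upstream_headers):
    # Forward only end-to-end headers; hop-by-hop headers describe the
    # upstream connection and must not be replayed by a proxy.
    return [
        (name, value) for name, value in upstream_headers.items()
        if name.lower() not in HOP_BY_HOP
    ]

Applied to the headers dict from the test, this yields exactly the two expected tuples, MS-Author-Via and SVN-Supported-Posts.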
@@ -1,311 +1,312 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import pytest |
|
21 | import pytest | |
22 | from pygments.lexers import get_lexer_by_name |
|
22 | from pygments.lexers import get_lexer_by_name | |
23 |
|
23 | |||
24 | from rhodecode.tests import no_newline_id_generator |
|
24 | from rhodecode.tests import no_newline_id_generator | |
25 | from rhodecode.lib.codeblocks import ( |
|
25 | from rhodecode.lib.codeblocks import ( | |
26 | tokenize_string, split_token_stream, rollup_tokenstream, |
|
26 | tokenize_string, split_token_stream, rollup_tokenstream, | |
27 | render_tokenstream) |
|
27 | render_tokenstream) | |
28 |
|
28 | |||
29 |
|
29 | |||
30 | class TestTokenizeString(object): |
|
30 | class TestTokenizeString(object): | |
31 |
|
31 | |||
32 | python_code = ''' |
|
32 | python_code = ''' | |
33 | import this |
|
33 | import this | |
34 |
|
34 | |||
35 | var = 6 |
|
35 | var = 6 | |
36 | print "this" |
|
36 | print("this") | |
37 |
|
37 | |||
38 | ''' |
|
38 | ''' | |
39 |
|
39 | |||
40 | def test_tokenize_as_python(self): |
|
40 | def test_tokenize_as_python(self): | |
41 | lexer = get_lexer_by_name('python') |
|
41 | lexer = get_lexer_by_name('python') | |
42 | tokens = list(tokenize_string(self.python_code, lexer)) |
|
42 | tokens = list(tokenize_string(self.python_code, lexer)) | |
43 |
|
43 | |||
44 | assert tokens == [ |
|
44 | assert tokens == [ | |
45 | ('', u'\n'), |
|
45 | ('', u'\n'), | |
46 | ('', u' '), |
|
46 | ('', u' '), | |
47 | ('kn', u'import'), |
|
47 | ('kn', u'import'), | |
48 | ('', u' '), |
|
48 | ('', u' '), | |
49 | ('nn', u'this'), |
|
49 | ('nn', u'this'), | |
50 | ('', u'\n'), |
|
50 | ('', u'\n'), | |
51 | ('', u'\n'), |
|
51 | ('', u'\n'), | |
52 | ('', u' '), |
|
52 | ('', u' '), | |
53 | ('n', u'var'), |
|
53 | ('n', u'var'), | |
54 | ('', u' '), |
|
54 | ('', u' '), | |
55 | ('o', u'='), |
|
55 | ('o', u'='), | |
56 | ('', u' '), |
|
56 | ('', u' '), | |
57 | ('mi', u'6'), |
|
57 | ('mi', u'6'), | |
58 | ('', u'\n'), |
|
58 | ('', u'\n'), | |
59 | ('', u' '), |
|
59 | ('', u' '), | |
60 | ('k', u'print'), |
|
60 | ('k', u'print'), | |
61 | ('', u' '), |
|
61 | ('p', u'('), | |
62 | ('s2', u'"'), |
|
62 | ('s2', u'"'), | |
63 | ('s2', u'this'), |
|
63 | ('s2', u'this'), | |
64 | ('s2', u'"'), |
|
64 | ('s2', u'"'), | |
|
65 | ('p', u')'), | |||
65 | ('', u'\n'), |
|
66 | ('', u'\n'), | |
66 | ('', u'\n'), |
|
67 | ('', u'\n'), | |
67 | ('', u' ') |
|
68 | ('', u' ') | |
68 | ] |
|
69 | ] | |
69 |
|
70 | |||
70 | def test_tokenize_as_text(self): |
|
71 | def test_tokenize_as_text(self): | |
71 | lexer = get_lexer_by_name('text') |
|
72 | lexer = get_lexer_by_name('text') | |
72 | tokens = list(tokenize_string(self.python_code, lexer)) |
|
73 | tokens = list(tokenize_string(self.python_code, lexer)) | |
73 |
|
74 | |||
74 | assert tokens == [ |
|
75 | assert tokens == [ | |
75 | ('', |
|
76 | ('', | |
76 | u'\n import this\n\n var = 6\n print "this"\n\n ') |
|
77 | u'\n import this\n\n var = 6\n print("this")\n\n ') | |
77 | ] |
|
78 | ] | |
78 |
|
79 | |||
79 |
|
80 | |||
80 | class TestSplitTokenStream(object): |
|
81 | class TestSplitTokenStream(object): | |
81 |
|
82 | |||
82 | def test_split_token_stream(self): |
|
83 | def test_split_token_stream(self): | |
83 | lines = list(split_token_stream( |
|
84 | lines = list(split_token_stream( | |
84 | [('type1', 'some\ntext'), ('type2', 'more\n')])) |
|
85 | [('type1', 'some\ntext'), ('type2', 'more\n')])) | |
85 |
|
86 | |||
86 | assert lines == [ |
|
87 | assert lines == [ | |
87 | [('type1', u'some')], |
|
88 | [('type1', u'some')], | |
88 | [('type1', u'text'), ('type2', u'more')], |
|
89 | [('type1', u'text'), ('type2', u'more')], | |
89 | [('type2', u'')], |
|
90 | [('type2', u'')], | |
90 | ] |
|
91 | ] | |
91 |
|
92 | |||
92 | def test_split_token_stream_single(self): |
|
93 | def test_split_token_stream_single(self): | |
93 | lines = list(split_token_stream( |
|
94 | lines = list(split_token_stream( | |
94 | [('type1', '\n')])) |
|
95 | [('type1', '\n')])) | |
95 |
|
96 | |||
96 | assert lines == [ |
|
97 | assert lines == [ | |
97 | [('type1', '')], |
|
98 | [('type1', '')], | |
98 | [('type1', '')], |
|
99 | [('type1', '')], | |
99 | ] |
|
100 | ] | |
100 |
|
101 | |||
101 | def test_split_token_stream_single_repeat(self): |
|
102 | def test_split_token_stream_single_repeat(self): | |
102 | lines = list(split_token_stream( |
|
103 | lines = list(split_token_stream( | |
103 | [('type1', '\n\n\n')])) |
|
104 | [('type1', '\n\n\n')])) | |
104 |
|
105 | |||
105 | assert lines == [ |
|
106 | assert lines == [ | |
106 | [('type1', '')], |
|
107 | [('type1', '')], | |
107 | [('type1', '')], |
|
108 | [('type1', '')], | |
108 | [('type1', '')], |
|
109 | [('type1', '')], | |
109 | [('type1', '')], |
|
110 | [('type1', '')], | |
110 | ] |
|
111 | ] | |
111 |
|
112 | |||
112 | def test_split_token_stream_multiple_repeat(self): |
|
113 | def test_split_token_stream_multiple_repeat(self): | |
113 | lines = list(split_token_stream( |
|
114 | lines = list(split_token_stream( | |
114 | [('type1', '\n\n'), ('type2', '\n\n')])) |
|
115 | [('type1', '\n\n'), ('type2', '\n\n')])) | |
115 |
|
116 | |||
116 | assert lines == [ |
|
117 | assert lines == [ | |
117 | [('type1', '')], |
|
118 | [('type1', '')], | |
118 | [('type1', '')], |
|
119 | [('type1', '')], | |
119 | [('type1', ''), ('type2', '')], |
|
120 | [('type1', ''), ('type2', '')], | |
120 | [('type2', '')], |
|
121 | [('type2', '')], | |
121 | [('type2', '')], |
|
122 | [('type2', '')], | |
122 | ] |
|
123 | ] | |
123 |
|
124 | |||
124 |
|
125 | |||
125 | class TestRollupTokens(object): |
|
126 | class TestRollupTokens(object): | |
126 |
|
127 | |||
127 | @pytest.mark.parametrize('tokenstream,output', [ |
|
128 | @pytest.mark.parametrize('tokenstream,output', [ | |
128 | ([], |
|
129 | ([], | |
129 | []), |
|
130 | []), | |
130 | ([('A', 'hell'), ('A', 'o')], [ |
|
131 | ([('A', 'hell'), ('A', 'o')], [ | |
131 | ('A', [ |
|
132 | ('A', [ | |
132 | ('', 'hello')]), |
|
133 | ('', 'hello')]), | |
133 | ]), |
|
134 | ]), | |
134 | ([('A', 'hell'), ('B', 'o')], [ |
|
135 | ([('A', 'hell'), ('B', 'o')], [ | |
135 | ('A', [ |
|
136 | ('A', [ | |
136 | ('', 'hell')]), |
|
137 | ('', 'hell')]), | |
137 | ('B', [ |
|
138 | ('B', [ | |
138 | ('', 'o')]), |
|
139 | ('', 'o')]), | |
139 | ]), |
|
140 | ]), | |
140 | ([('A', 'hel'), ('A', 'lo'), ('B', ' '), ('A', 'there')], [ |
|
141 | ([('A', 'hel'), ('A', 'lo'), ('B', ' '), ('A', 'there')], [ | |
141 | ('A', [ |
|
142 | ('A', [ | |
142 | ('', 'hello')]), |
|
143 | ('', 'hello')]), | |
143 | ('B', [ |
|
144 | ('B', [ | |
144 | ('', ' ')]), |
|
145 | ('', ' ')]), | |
145 | ('A', [ |
|
146 | ('A', [ | |
146 | ('', 'there')]), |
|
147 | ('', 'there')]), | |
147 | ]), |
|
148 | ]), | |
148 | ]) |
|
149 | ]) | |
149 | def test_rollup_tokenstream_without_ops(self, tokenstream, output): |
|
150 | def test_rollup_tokenstream_without_ops(self, tokenstream, output): | |
150 | assert list(rollup_tokenstream(tokenstream)) == output |
|
151 | assert list(rollup_tokenstream(tokenstream)) == output | |
151 |
|
152 | |||
152 | @pytest.mark.parametrize('tokenstream,output', [ |
|
153 | @pytest.mark.parametrize('tokenstream,output', [ | |
153 | ([], |
|
154 | ([], | |
154 | []), |
|
155 | []), | |
155 | ([('A', '', 'hell'), ('A', '', 'o')], [ |
|
156 | ([('A', '', 'hell'), ('A', '', 'o')], [ | |
156 | ('A', [ |
|
157 | ('A', [ | |
157 | ('', 'hello')]), |
|
158 | ('', 'hello')]), | |
158 | ]), |
|
159 | ]), | |
159 | ([('A', '', 'hell'), ('B', '', 'o')], [ |
|
160 | ([('A', '', 'hell'), ('B', '', 'o')], [ | |
160 | ('A', [ |
|
161 | ('A', [ | |
161 | ('', 'hell')]), |
|
162 | ('', 'hell')]), | |
162 | ('B', [ |
|
163 | ('B', [ | |
163 | ('', 'o')]), |
|
164 | ('', 'o')]), | |
164 | ]), |
|
165 | ]), | |
165 | ([('A', '', 'h'), ('B', '', 'e'), ('C', '', 'y')], [ |
|
166 | ([('A', '', 'h'), ('B', '', 'e'), ('C', '', 'y')], [ | |
166 | ('A', [ |
|
167 | ('A', [ | |
167 | ('', 'h')]), |
|
168 | ('', 'h')]), | |
168 | ('B', [ |
|
169 | ('B', [ | |
169 | ('', 'e')]), |
|
170 | ('', 'e')]), | |
170 | ('C', [ |
|
171 | ('C', [ | |
171 | ('', 'y')]), |
|
172 | ('', 'y')]), | |
172 | ]), |
|
173 | ]), | |
173 | ([('A', '', 'h'), ('A', '', 'e'), ('C', '', 'y')], [ |
|
174 | ([('A', '', 'h'), ('A', '', 'e'), ('C', '', 'y')], [ | |
174 | ('A', [ |
|
175 | ('A', [ | |
175 | ('', 'he')]), |
|
176 | ('', 'he')]), | |
176 | ('C', [ |
|
177 | ('C', [ | |
177 | ('', 'y')]), |
|
178 | ('', 'y')]), | |
178 | ]), |
|
179 | ]), | |
179 | ([('A', 'ins', 'h'), ('A', 'ins', 'e')], [ |
|
180 | ([('A', 'ins', 'h'), ('A', 'ins', 'e')], [ | |
180 | ('A', [ |
|
181 | ('A', [ | |
181 | ('ins', 'he') |
|
182 | ('ins', 'he') | |
182 | ]), |
|
183 | ]), | |
183 | ]), |
|
184 | ]), | |
184 | ([('A', 'ins', 'h'), ('A', 'del', 'e')], [ |
|
185 | ([('A', 'ins', 'h'), ('A', 'del', 'e')], [ | |
185 | ('A', [ |
|
186 | ('A', [ | |
186 | ('ins', 'h'), |
|
187 | ('ins', 'h'), | |
187 | ('del', 'e') |
|
188 | ('del', 'e') | |
188 | ]), |
|
189 | ]), | |
189 | ]), |
|
190 | ]), | |
190 | ([('A', 'ins', 'h'), ('B', 'del', 'e'), ('B', 'del', 'y')], [ |
|
191 | ([('A', 'ins', 'h'), ('B', 'del', 'e'), ('B', 'del', 'y')], [ | |
191 | ('A', [ |
|
192 | ('A', [ | |
192 | ('ins', 'h'), |
|
193 | ('ins', 'h'), | |
193 | ]), |
|
194 | ]), | |
194 | ('B', [ |
|
195 | ('B', [ | |
195 | ('del', 'ey'), |
|
196 | ('del', 'ey'), | |
196 | ]), |
|
197 | ]), | |
197 | ]), |
|
198 | ]), | |
198 | ([('A', 'ins', 'h'), ('A', 'del', 'e'), ('B', 'del', 'y')], [ |
|
199 | ([('A', 'ins', 'h'), ('A', 'del', 'e'), ('B', 'del', 'y')], [ | |
199 | ('A', [ |
|
200 | ('A', [ | |
200 | ('ins', 'h'), |
|
201 | ('ins', 'h'), | |
201 | ('del', 'e'), |
|
202 | ('del', 'e'), | |
202 | ]), |
|
203 | ]), | |
203 | ('B', [ |
|
204 | ('B', [ | |
204 | ('del', 'y'), |
|
205 | ('del', 'y'), | |
205 | ]), |
|
206 | ]), | |
206 | ]), |
|
207 | ]), | |
207 | ([('A', '', 'some'), ('A', 'ins', 'new'), ('A', '', 'name')], [ |
|
208 | ([('A', '', 'some'), ('A', 'ins', 'new'), ('A', '', 'name')], [ | |
208 | ('A', [ |
|
209 | ('A', [ | |
209 | ('', 'some'), |
|
210 | ('', 'some'), | |
210 | ('ins', 'new'), |
|
211 | ('ins', 'new'), | |
211 | ('', 'name'), |
|
212 | ('', 'name'), | |
212 | ]), |
|
213 | ]), | |
213 | ]), |
|
214 | ]), | |
214 | ]) |
|
215 | ]) | |
215 | def test_rollup_tokenstream_with_ops(self, tokenstream, output): |
|
216 | def test_rollup_tokenstream_with_ops(self, tokenstream, output): | |
216 | assert list(rollup_tokenstream(tokenstream)) == output |
|
217 | assert list(rollup_tokenstream(tokenstream)) == output | |
217 |
|
218 | |||
218 |
|
219 | |||
219 | class TestRenderTokenStream(object): |
|
220 | class TestRenderTokenStream(object): | |
220 |
|
221 | |||
221 | @pytest.mark.parametrize('tokenstream,output', [ |
|
222 | @pytest.mark.parametrize('tokenstream,output', [ | |
222 | ( |
|
223 | ( | |
223 | [], |
|
224 | [], | |
224 | '', |
|
225 | '', | |
225 | ), |
|
226 | ), | |
226 | ( |
|
227 | ( | |
227 | [('', '', u'')], |
|
228 | [('', '', u'')], | |
228 | '<span></span>', |
|
229 | '<span></span>', | |
229 | ), |
|
230 | ), | |
230 | ( |
|
231 | ( | |
231 | [('', '', u'text')], |
|
232 | [('', '', u'text')], | |
232 | '<span>text</span>', |
|
233 | '<span>text</span>', | |
233 | ), |
|
234 | ), | |
234 | ( |
|
235 | ( | |
235 | [('A', '', u'')], |
|
236 | [('A', '', u'')], | |
236 | '<span class="A"></span>', |
|
237 | '<span class="A"></span>', | |
237 | ), |
|
238 | ), | |
238 | ( |
|
239 | ( | |
239 | [('A', '', u'hello')], |
|
240 | [('A', '', u'hello')], | |
240 | '<span class="A">hello</span>', |
|
241 | '<span class="A">hello</span>', | |
241 | ), |
|
242 | ), | |
242 | ( |
|
243 | ( | |
243 | [('A', '', u'hel'), ('A', '', u'lo')], |
|
244 | [('A', '', u'hel'), ('A', '', u'lo')], | |
244 | '<span class="A">hello</span>', |
|
245 | '<span class="A">hello</span>', | |
245 | ), |
|
246 | ), | |
246 | ( |
|
247 | ( | |
247 | [('A', '', u'two\n'), ('A', '', u'lines')], |
|
248 | [('A', '', u'two\n'), ('A', '', u'lines')], | |
248 | '<span class="A">two\nlines</span>', |
|
249 | '<span class="A">two\nlines</span>', | |
249 | ), |
|
250 | ), | |
250 | ( |
|
251 | ( | |
251 | [('A', '', u'\nthree\n'), ('A', '', u'lines')], |
|
252 | [('A', '', u'\nthree\n'), ('A', '', u'lines')], | |
252 | '<span class="A">\nthree\nlines</span>', |
|
253 | '<span class="A">\nthree\nlines</span>', | |
253 | ), |
|
254 | ), | |
254 | ( |
|
255 | ( | |
255 | [('', '', u'\n'), ('A', '', u'line')], |
|
256 | [('', '', u'\n'), ('A', '', u'line')], | |
256 | '<span>\n</span><span class="A">line</span>', |
|
257 | '<span>\n</span><span class="A">line</span>', | |
257 | ), |
|
258 | ), | |
258 | ( |
|
259 | ( | |
259 | [('', 'ins', u'\n'), ('A', '', u'line')], |
|
260 | [('', 'ins', u'\n'), ('A', '', u'line')], | |
260 | '<span><ins>\n</ins></span><span class="A">line</span>', |
|
261 | '<span><ins>\n</ins></span><span class="A">line</span>', | |
261 | ), |
|
262 | ), | |
262 | ( |
|
263 | ( | |
263 | [('A', '', u'hel'), ('A', 'ins', u'lo')], |
|
264 | [('A', '', u'hel'), ('A', 'ins', u'lo')], | |
264 | '<span class="A">hel<ins>lo</ins></span>', |
|
265 | '<span class="A">hel<ins>lo</ins></span>', | |
265 | ), |
|
266 | ), | |
266 | ( |
|
267 | ( | |
267 | [('A', '', u'hel'), ('A', 'ins', u'l'), ('A', 'ins', u'o')], |
|
268 | [('A', '', u'hel'), ('A', 'ins', u'l'), ('A', 'ins', u'o')], | |
268 | '<span class="A">hel<ins>lo</ins></span>', |
|
269 | '<span class="A">hel<ins>lo</ins></span>', | |
269 | ), |
|
270 | ), | |
270 | ( |
|
271 | ( | |
271 | [('A', '', u'hel'), ('A', 'ins', u'l'), ('A', 'del', u'o')], |
|
272 | [('A', '', u'hel'), ('A', 'ins', u'l'), ('A', 'del', u'o')], | |
272 | '<span class="A">hel<ins>l</ins><del>o</del></span>', |
|
273 | '<span class="A">hel<ins>l</ins><del>o</del></span>', | |
273 | ), |
|
274 | ), | |
274 | ( |
|
275 | ( | |
275 | [('A', '', u'hel'), ('B', '', u'lo')], |
|
276 | [('A', '', u'hel'), ('B', '', u'lo')], | |
276 | '<span class="A">hel</span><span class="B">lo</span>', |
|
277 | '<span class="A">hel</span><span class="B">lo</span>', | |
277 | ), |
|
278 | ), | |
278 | ( |
|
279 | ( | |
279 | [('A', '', u'hel'), ('B', 'ins', u'lo')], |
|
280 | [('A', '', u'hel'), ('B', 'ins', u'lo')], | |
280 | '<span class="A">hel</span><span class="B"><ins>lo</ins></span>', |
|
281 | '<span class="A">hel</span><span class="B"><ins>lo</ins></span>', | |
281 | ), |
|
282 | ), | |
282 | ], ids=no_newline_id_generator) |
|
283 | ], ids=no_newline_id_generator) | |
283 | def test_render_tokenstream_with_ops(self, tokenstream, output): |
|
284 | def test_render_tokenstream_with_ops(self, tokenstream, output): | |
284 | html = render_tokenstream(tokenstream) |
|
285 | html = render_tokenstream(tokenstream) | |
285 | assert html == output |
|
286 | assert html == output | |
286 |
|
287 | |||
287 | @pytest.mark.parametrize('tokenstream,output', [ |
|
288 | @pytest.mark.parametrize('tokenstream,output', [ | |
288 | ( |
|
289 | ( | |
289 | [('A', u'hel'), ('A', u'lo')], |
|
290 | [('A', u'hel'), ('A', u'lo')], | |
290 | '<span class="A">hello</span>', |
|
291 | '<span class="A">hello</span>', | |
291 | ), |
|
292 | ), | |
292 | ( |
|
293 | ( | |
293 | [('A', u'hel'), ('A', u'l'), ('A', u'o')], |
|
294 | [('A', u'hel'), ('A', u'l'), ('A', u'o')], | |
294 | '<span class="A">hello</span>', |
|
295 | '<span class="A">hello</span>', | |
295 | ), |
|
296 | ), | |
296 | ( |
|
297 | ( | |
297 | [('A', u'hel'), ('A', u'l'), ('A', u'o')], |
|
298 | [('A', u'hel'), ('A', u'l'), ('A', u'o')], | |
298 | '<span class="A">hello</span>', |
|
299 | '<span class="A">hello</span>', | |
299 | ), |
|
300 | ), | |
300 | ( |
|
301 | ( | |
301 | [('A', u'hel'), ('B', u'lo')], |
|
302 | [('A', u'hel'), ('B', u'lo')], | |
302 | '<span class="A">hel</span><span class="B">lo</span>', |
|
303 | '<span class="A">hel</span><span class="B">lo</span>', | |
303 | ), |
|
304 | ), | |
304 | ( |
|
305 | ( | |
305 | [('A', u'hel'), ('B', u'lo')], |
|
306 | [('A', u'hel'), ('B', u'lo')], | |
306 | '<span class="A">hel</span><span class="B">lo</span>', |
|
307 | '<span class="A">hel</span><span class="B">lo</span>', | |
307 | ), |
|
308 | ), | |
308 | ]) |
|
309 | ]) | |
309 | def test_render_tokenstream_without_ops(self, tokenstream, output): |
|
310 | def test_render_tokenstream_without_ops(self, tokenstream, output): | |
310 | html = render_tokenstream(tokenstream) |
|
311 | html = render_tokenstream(tokenstream) | |
311 | assert html == output |
|
312 | assert html == output |
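The expected outputs in TestSplitTokenStream fully determine the splitting behaviour: every '\n' closes the current line, and a token whose text ends in '\n' still contributes an empty-text token to the following line. A reference implementation reconstructed from those cases (a sketch matching the tests, not the rhodecode.lib.codeblocks source):

def split_token_stream(tokens):
    # Split a stream of (token_type, text) pairs into per-line lists.
    line = []
    for token_type, text in tokens:
        parts = text.split('\n')
        # Every part except the last is terminated by a newline and
        # therefore closes the current line.
        for part in parts[:-1]:
            line.append((token_type, part))
            yield line
            line = []
        # The trailing part (possibly '') carries over to the next line,
        # which is why a lone '\n' produces two lines of ('type1', '').
        line.append((token_type, parts[-1]))
    if line:
        yield line

assert list(split_token_stream([('type1', 'some\ntext'), ('type2', 'more\n')])) == [
    [('type1', 'some')],
    [('type1', 'text'), ('type2', 'more')],
    [('type2', '')],
]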
@@ -1,463 +1,463 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Module to test the performance of pull, push and clone operations. |
|
22 | Module to test the performance of pull, push and clone operations. | |
23 |
|
23 | |||
24 | It works by replaying a group of commits to the repo. |
|
24 | It works by replaying a group of commits to the repo. | |
25 | """ |
|
25 | """ | |
26 |
|
26 | |||
27 | import argparse |
|
27 | import argparse | |
28 | import collections |
|
28 | import collections | |
29 | import ConfigParser |
|
29 | import ConfigParser | |
30 | import functools |
|
30 | import functools | |
31 | import itertools |
|
31 | import itertools | |
32 | import os |
|
32 | import os | |
33 | import pprint |
|
33 | import pprint | |
34 | import shutil |
|
34 | import shutil | |
35 | import subprocess32 |
|
35 | import subprocess32 | |
36 | import sys |
|
36 | import sys | |
37 | import time |
|
37 | import time | |
38 |
|
38 | |||
39 | import api |
|
39 | import api | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | def mean(container): |
|
42 | def mean(container): | |
43 | """Return the mean of the container.""" |
|
43 | """Return the mean of the container.""" | |
44 | if not container: |
|
44 | if not container: | |
45 | return -1.0 |
|
45 | return -1.0 | |
46 | return sum(container) / len(container) |
|
46 | return sum(container) / len(container) | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | def keep_cwd(f): |
|
49 | def keep_cwd(f): | |
50 | """Decorator that keeps track of the starting working directory.""" |
|
50 | """Decorator that keeps track of the starting working directory.""" | |
51 | @functools.wraps(f) |
|
51 | @functools.wraps(f) | |
52 | def wrapped_f(*args, **kwargs): |
|
52 | def wrapped_f(*args, **kwargs): | |
53 | cur_dir = os.getcwd() |
|
53 | cur_dir = os.getcwd() | |
54 | try: |
|
54 | try: | |
55 | return f(*args, **kwargs) |
|
55 | return f(*args, **kwargs) | |
56 | finally: |
|
56 | finally: | |
57 | os.chdir(cur_dir) |
|
57 | os.chdir(cur_dir) | |
58 |
|
58 | |||
59 | return wrapped_f |
|
59 | return wrapped_f | |
60 |
|
60 | |||
61 |
|
61 | |||
62 | def timed(f): |
|
62 | def timed(f): | |
63 | """Decorator that returns the time it took to execute the function.""" |
|
63 | """Decorator that returns the time it took to execute the function.""" | |
64 | @functools.wraps(f) |
|
64 | @functools.wraps(f) | |
65 | def wrapped_f(*args, **kwargs): |
|
65 | def wrapped_f(*args, **kwargs): | |
66 | start_time = time.time() |
|
66 | start_time = time.time() | |
67 | try: |
|
67 | try: | |
68 | f(*args, **kwargs) |
|
68 | f(*args, **kwargs) | |
69 | finally: |
|
69 | finally: | |
70 | return time.time() - start_time |
|
70 | return time.time() - start_time | |
71 |
|
71 | |||
72 | return wrapped_f |
|
72 | return wrapped_f | |
73 |
|
73 | |||
74 |
|
74 | |||
75 | def execute(*popenargs, **kwargs): |
|
75 | def execute(*popenargs, **kwargs): | |
76 | """Extension of subprocess.check_output to support writing to stdin.""" |
|
76 | """Extension of subprocess.check_output to support writing to stdin.""" | |
77 | input = kwargs.pop('stdin', None) |
|
77 | input = kwargs.pop('stdin', None) | |
78 | stdin = None |
|
78 | stdin = None | |
79 | if input: |
|
79 | if input: | |
80 | stdin = subprocess32.PIPE |
|
80 | stdin = subprocess32.PIPE | |
81 | #if 'stderr' not in kwargs: |
|
81 | #if 'stderr' not in kwargs: | |
82 | # kwargs['stderr'] = subprocess32.PIPE |
|
82 | # kwargs['stderr'] = subprocess32.PIPE | |
83 | if 'stdout' in kwargs: |
|
83 | if 'stdout' in kwargs: | |
84 | raise ValueError('stdout argument not allowed, it will be overridden.') |
|
84 | raise ValueError('stdout argument not allowed, it will be overridden.') | |
85 | process = subprocess32.Popen(stdin=stdin, stdout=subprocess32.PIPE, |
|
85 | process = subprocess32.Popen(stdin=stdin, stdout=subprocess32.PIPE, | |
86 | *popenargs, **kwargs) |
|
86 | *popenargs, **kwargs) | |
87 | output, error = process.communicate(input=input) |
|
87 | output, error = process.communicate(input=input) | |
88 | retcode = process.poll() |
|
88 | retcode = process.poll() | |
89 | if retcode: |
|
89 | if retcode: | |
90 | cmd = kwargs.get("args") |
|
90 | cmd = kwargs.get("args") | |
91 | if cmd is None: |
|
91 | if cmd is None: | |
92 | cmd = popenargs[0] |
|
92 | cmd = popenargs[0] | |
93 | print cmd, output, error |
|
93 | print('{} {} {} '.format(cmd, output, error)) | |
94 | raise subprocess32.CalledProcessError(retcode, cmd, output=output) |
|
94 | raise subprocess32.CalledProcessError(retcode, cmd, output=output) | |
95 | return output |
|
95 | return output | |
96 |
|
96 | |||
97 |
|
97 | |||
98 | def get_repo_name(repo_url): |
|
98 | def get_repo_name(repo_url): | |
99 | """Extract the repo name from its url.""" |
|
99 | """Extract the repo name from its url.""" | |
100 | repo_url = repo_url.rstrip('/') |
|
100 | repo_url = repo_url.rstrip('/') | |
101 | return repo_url.split('/')[-1].split('.')[0] |
|
101 | return repo_url.split('/')[-1].split('.')[0] | |
102 |
|
102 | |||
103 |
|
103 | |||
104 | class TestPerformanceBase(object): |
|
104 | class TestPerformanceBase(object): | |
105 | def __init__(self, base_dir, repo_url, n_commits, max_commits, |
|
105 | def __init__(self, base_dir, repo_url, n_commits, max_commits, | |
106 | skip_commits): |
|
106 | skip_commits): | |
107 | self.repo_url = repo_url |
|
107 | self.repo_url = repo_url | |
108 | self.repo_name = get_repo_name(self.repo_url) |
|
108 | self.repo_name = get_repo_name(self.repo_url) | |
109 | self.upstream_repo_name = '%s_upstream' % self.repo_name |
|
109 | self.upstream_repo_name = '%s_upstream' % self.repo_name | |
110 | self.base_dir = os.path.abspath(base_dir) |
|
110 | self.base_dir = os.path.abspath(base_dir) | |
111 | self.n_commits = n_commits |
|
111 | self.n_commits = n_commits | |
112 | self.max_commits = max_commits |
|
112 | self.max_commits = max_commits | |
113 | self.skip_commits = skip_commits |
|
113 | self.skip_commits = skip_commits | |
114 | self.push_times = [] |
|
114 | self.push_times = [] | |
115 | self.pull_times = [] |
|
115 | self.pull_times = [] | |
116 | self.empty_pull_times = [] |
|
116 | self.empty_pull_times = [] | |
117 | self.clone_time = -1.0 |
|
117 | self.clone_time = -1.0 | |
118 | self.last_commit = None |
|
118 | self.last_commit = None | |
119 |
|
119 | |||
120 | self.cloned_repo = '' |
|
120 | self.cloned_repo = '' | |
121 | self.pull_repo = '' |
|
121 | self.pull_repo = '' | |
122 | self.orig_repo = '' |
|
122 | self.orig_repo = '' | |
123 |
|
123 | |||
124 | def run(self): |
|
124 | def run(self): | |
125 | try: |
|
125 | try: | |
126 | self.test() |
|
126 | self.test() | |
127 | except Exception as error: |
|
127 | except Exception as error: | |
128 | print error |
|
128 | print(error) | |
129 | finally: |
|
129 | finally: | |
130 | self.cleanup() |
|
130 | self.cleanup() | |
131 |
|
131 | |||
132 | print 'Clone time :{}'.format(self.clone_time) |
|
132 | print('Clone time :{}'.format(self.clone_time)) | |
133 | print 'Push time :{}'.format(mean(self.push_times)) |
|
133 | print('Push time :{}'.format(mean(self.push_times))) | |
134 | print 'Pull time :{}'.format(mean(self.pull_times)) |
|
134 | print('Pull time :{}'.format(mean(self.pull_times))) | |
135 | print 'Empty pull time:{}'.format(mean(self.empty_pull_times)) |
|
135 | print('Empty pull time:{}'.format(mean(self.empty_pull_times))) | |
136 |
|
136 | |||
137 | return { |
|
137 | return { | |
138 | 'clone': self.clone_time, |
|
138 | 'clone': self.clone_time, | |
139 | 'push': mean(self.push_times), |
|
139 | 'push': mean(self.push_times), | |
140 | 'pull': mean(self.pull_times), |
|
140 | 'pull': mean(self.pull_times), | |
141 | 'empty_pull': mean(self.empty_pull_times), |
|
141 | 'empty_pull': mean(self.empty_pull_times), | |
142 | } |
|
142 | } | |
143 |
|
143 | |||
144 | @keep_cwd |
|
144 | @keep_cwd | |
145 | def test(self): |
|
145 | def test(self): | |
146 | os.chdir(self.base_dir) |
|
146 | os.chdir(self.base_dir) | |
147 |
|
147 | |||
148 | self.orig_repo = os.path.join(self.base_dir, self.repo_name) |
|
148 | self.orig_repo = os.path.join(self.base_dir, self.repo_name) | |
149 | if not os.path.exists(self.orig_repo): |
|
149 | if not os.path.exists(self.orig_repo): | |
150 | self.clone_repo(self.repo_url, default_only=True) |
|
150 | self.clone_repo(self.repo_url, default_only=True) | |
151 |
|
151 | |||
152 | upstream_url = self.create_repo(self.upstream_repo_name, self.repo_type) |
|
152 | upstream_url = self.create_repo(self.upstream_repo_name, self.repo_type) | |
153 |
|
153 | |||
154 | self.add_remote(self.orig_repo, upstream_url) |
|
154 | self.add_remote(self.orig_repo, upstream_url) | |
155 |
|
155 | |||
156 | self.pull_repo = os.path.join(self.base_dir, '%s_pull' % self.repo_name) |
|
156 | self.pull_repo = os.path.join(self.base_dir, '%s_pull' % self.repo_name) | |
157 | self.clone_repo(upstream_url, self.pull_repo) |
|
157 | self.clone_repo(upstream_url, self.pull_repo) | |
158 |
|
158 | |||
159 | commits = self.get_commits(self.orig_repo) |
|
159 | commits = self.get_commits(self.orig_repo) | |
160 | self.last_commit = commits[-1] |
|
160 | self.last_commit = commits[-1] | |
161 | if self.skip_commits: |
|
161 | if self.skip_commits: | |
162 | self.push( |
|
162 | self.push( | |
163 | self.orig_repo, commits[self.skip_commits - 1], 'upstream') |
|
163 | self.orig_repo, commits[self.skip_commits - 1], 'upstream') | |
164 | commits = commits[self.skip_commits:self.max_commits] |
|
164 | commits = commits[self.skip_commits:self.max_commits] | |
165 |
|
165 | |||
166 | print 'Working with %d commits' % len(commits) |
|
166 | print('Working with %d commits' % len(commits)) | |
167 | for i in xrange(self.n_commits - 1, len(commits), self.n_commits): |
|
167 | for i in range(self.n_commits - 1, len(commits), self.n_commits): | |
168 | commit = commits[i] |
|
168 | commit = commits[i] | |
169 |
print |
|
169 | print('Processing commit %s (%d)' % (commit, i + 1)) | |
170 | self.push_times.append( |
|
170 | self.push_times.append( | |
171 | self.push(self.orig_repo, commit, 'upstream')) |
|
171 | self.push(self.orig_repo, commit, 'upstream')) | |
172 | self.check_remote_last_commit_is(commit, upstream_url) |
|
172 | self.check_remote_last_commit_is(commit, upstream_url) | |
173 |
|
173 | |||
174 | self.pull_times.append(self.pull(self.pull_repo)) |
|
174 | self.pull_times.append(self.pull(self.pull_repo)) | |
175 | self.check_local_last_commit_is(commit, self.pull_repo) |
|
175 | self.check_local_last_commit_is(commit, self.pull_repo) | |
176 |
|
176 | |||
177 | self.empty_pull_times.append(self.pull(self.pull_repo)) |
|
177 | self.empty_pull_times.append(self.pull(self.pull_repo)) | |
178 |
|
178 | |||
179 | self.cloned_repo = os.path.join(self.base_dir, |
|
179 | self.cloned_repo = os.path.join(self.base_dir, | |
180 | '%s_clone' % self.repo_name) |
|
180 | '%s_clone' % self.repo_name) | |
181 | self.clone_time = self.clone_repo(upstream_url, self.cloned_repo) |
|
181 | self.clone_time = self.clone_repo(upstream_url, self.cloned_repo) | |
182 |
|
182 | |||
183 | def cleanup(self): |
|
183 | def cleanup(self): | |
184 | try: |
|
184 | try: | |
185 | self.delete_repo(self.upstream_repo_name) |
|
185 | self.delete_repo(self.upstream_repo_name) | |
186 | except api.ApiError: |
|
186 | except api.ApiError: | |
187 | # Continue in case we could not delete the repo. Maybe we did not |
|
187 | # Continue in case we could not delete the repo. Maybe we did not | |
188 | # create it in the first place. |
|
188 | # create it in the first place. | |
189 | pass |
|
189 | pass | |
190 |
|
190 | |||
191 | shutil.rmtree(self.pull_repo, ignore_errors=True) |
|
191 | shutil.rmtree(self.pull_repo, ignore_errors=True) | |
192 | shutil.rmtree(self.cloned_repo, ignore_errors=True) |
|
192 | shutil.rmtree(self.cloned_repo, ignore_errors=True) | |
193 |
|
193 | |||
194 | if os.path.exists(self.orig_repo): |
|
194 | if os.path.exists(self.orig_repo): | |
195 | self.remove_remote(self.orig_repo) |
|
195 | self.remove_remote(self.orig_repo) | |
196 |
|
196 | |||
197 |
|
197 | |||
198 | class RhodeCodeMixin(object): |
|
198 | class RhodeCodeMixin(object): | |
199 | """Mixin providing the methods to create and delete repos in RhodeCode.""" |
|
199 | """Mixin providing the methods to create and delete repos in RhodeCode.""" | |
200 | def __init__(self, api_key): |
|
200 | def __init__(self, api_key): | |
201 | self.api = api.RCApi(api_key=api_key) |
|
201 | self.api = api.RCApi(api_key=api_key) | |
202 |
|
202 | |||
203 | def create_repo(self, repo_name, repo_type): |
|
203 | def create_repo(self, repo_name, repo_type): | |
204 | return self.api.create_repo(repo_name, repo_type, |
|
204 | return self.api.create_repo(repo_name, repo_type, | |
205 | 'Repo for performance testing') |
|
205 | 'Repo for performance testing') | |
206 |
|
206 | |||
207 | def delete_repo(self, repo_name): |
|
207 | def delete_repo(self, repo_name): | |
208 | return self.api.delete_repo(repo_name) |
|
208 | return self.api.delete_repo(repo_name) | |
209 |
|
209 | |||
210 |
|
210 | |||
211 | class GitMixin(object): |
|
211 | class GitMixin(object): | |
212 | """Mixin providing the git operations.""" |
|
212 | """Mixin providing the git operations.""" | |
213 | @timed |
|
213 | @timed | |
214 | def clone_repo(self, repo_url, destination=None, default_only=False): |
|
214 | def clone_repo(self, repo_url, destination=None, default_only=False): | |
215 | args = ['git', 'clone'] |
|
215 | args = ['git', 'clone'] | |
216 | if default_only: |
|
216 | if default_only: | |
217 | args.extend(['--branch', 'master', '--single-branch']) |
|
217 | args.extend(['--branch', 'master', '--single-branch']) | |
218 | args.append(repo_url) |
|
218 | args.append(repo_url) | |
219 | if destination: |
|
219 | if destination: | |
220 | args.append(destination) |
|
220 | args.append(destination) | |
221 | execute(args) |
|
221 | execute(args) | |
222 |
|
222 | |||
223 | @keep_cwd |
|
223 | @keep_cwd | |
224 | def add_remote(self, repo, remote_url, remote_name='upstream'): |
|
224 | def add_remote(self, repo, remote_url, remote_name='upstream'): | |
225 | self.remove_remote(repo, remote_name) |
|
225 | self.remove_remote(repo, remote_name) | |
226 | os.chdir(repo) |
|
226 | os.chdir(repo) | |
227 | execute(['git', 'remote', 'add', remote_name, remote_url]) |
|
227 | execute(['git', 'remote', 'add', remote_name, remote_url]) | |
228 |
|
228 | |||
229 | @keep_cwd |
|
229 | @keep_cwd | |
230 | def remove_remote(self, repo, remote_name='upstream'): |
|
230 | def remove_remote(self, repo, remote_name='upstream'): | |
231 | os.chdir(repo) |
|
231 | os.chdir(repo) | |
232 | remotes = execute(['git', 'remote']).split('\n') |
|
232 | remotes = execute(['git', 'remote']).split('\n') | |
233 | if remote_name in remotes: |
|
233 | if remote_name in remotes: | |
234 | execute(['git', 'remote', 'remove', remote_name]) |
|
234 | execute(['git', 'remote', 'remove', remote_name]) | |
235 |
|
235 | |||
236 | @keep_cwd |
|
236 | @keep_cwd | |
237 | def get_commits(self, repo, branch='master'): |
|
237 | def get_commits(self, repo, branch='master'): | |
238 | os.chdir(repo) |
|
238 | os.chdir(repo) | |
239 | commits_list = execute( |
|
239 | commits_list = execute( | |
240 | ['git', 'log', '--first-parent', branch, '--pretty=%H']) |
|
240 | ['git', 'log', '--first-parent', branch, '--pretty=%H']) | |
241 | return commits_list.strip().split('\n')[::-1] |
|
241 | return commits_list.strip().split('\n')[::-1] | |
242 |
|
242 | |||
243 | @timed |
|
243 | @timed | |
244 | def push(self, repo, commit, remote_name=None): |
|
244 | def push(self, repo, commit, remote_name=None): | |
245 | os.chdir(repo) |
|
245 | os.chdir(repo) | |
246 | try: |
|
246 | try: | |
247 | execute(['git', 'reset', '--soft', commit]) |
|
247 | execute(['git', 'reset', '--soft', commit]) | |
248 | args = ['git', 'push'] |
|
248 | args = ['git', 'push'] | |
249 | if remote_name: |
|
249 | if remote_name: | |
250 | args.append(remote_name) |
|
250 | args.append(remote_name) | |
251 | execute(args) |
|
251 | execute(args) | |
252 | finally: |
|
252 | finally: | |
253 | execute(['git', 'reset', '--soft', 'HEAD@{1}']) |
|
253 | execute(['git', 'reset', '--soft', 'HEAD@{1}']) | |
254 |
|
254 | |||
255 | @timed |
|
255 | @timed | |
256 | def pull(self, repo): |
|
256 | def pull(self, repo): | |
257 | os.chdir(repo) |
|
257 | os.chdir(repo) | |
258 | execute(['git', 'pull']) |
|
258 | execute(['git', 'pull']) | |
259 |
|
259 | |||
260 | def _remote_last_commit(self, repo_url): |
|
260 | def _remote_last_commit(self, repo_url): | |
261 | output = execute(['git', 'ls-remote', repo_url, 'HEAD']) |
|
261 | output = execute(['git', 'ls-remote', repo_url, 'HEAD']) | |
262 | return output.split()[0] |
|
262 | return output.split()[0] | |
263 |
|
263 | |||
264 | def check_remote_last_commit_is(self, commit, repo_url): |
|
264 | def check_remote_last_commit_is(self, commit, repo_url): | |
265 | last_remote_commit = self._remote_last_commit(repo_url) |
|
265 | last_remote_commit = self._remote_last_commit(repo_url) | |
266 | if last_remote_commit != commit: |
|
266 | if last_remote_commit != commit: | |
267 | raise Exception('Push did not work, expected commit %s but got %s' % |
|
267 | raise Exception('Push did not work, expected commit %s but got %s' % | |
268 | (commit, last_remote_commit)) |
|
268 | (commit, last_remote_commit)) | |
269 |
|
269 | |||
270 | @keep_cwd |
|
270 | @keep_cwd | |
271 | def _local_last_commit(self, repo): |
|
271 | def _local_last_commit(self, repo): | |
272 | os.chdir(repo) |
|
272 | os.chdir(repo) | |
273 | return execute(['git', 'rev-parse', 'HEAD']).strip() |
|
273 | return execute(['git', 'rev-parse', 'HEAD']).strip() | |
274 |
|
274 | |||
275 | def check_local_last_commit_is(self, commit, repo): |
|
275 | def check_local_last_commit_is(self, commit, repo): | |
276 | last_local_commit = self._local_last_commit(repo) |
|
276 | last_local_commit = self._local_last_commit(repo) | |
277 | if last_local_commit != commit: |
|
277 | if last_local_commit != commit: | |
278 | raise Exception('Pull did not work, expected commit %s but got %s' % |
|
278 | raise Exception('Pull did not work, expected commit %s but got %s' % | |
279 | (commit, last_local_commit)) |
|
279 | (commit, last_local_commit)) | |
280 |
|
280 | |||
281 |
|
281 | |||
282 | class HgMixin(object): |
|
282 | class HgMixin(object): | |
283 | """Mixin providing the mercurial operations.""" |
|
283 | """Mixin providing the mercurial operations.""" | |
284 | @timed |
|
284 | @timed | |
285 | def clone_repo(self, repo_url, destination=None, default_only=False): |
|
285 | def clone_repo(self, repo_url, destination=None, default_only=False): | |
286 | args = ['hg', 'clone'] |
|
286 | args = ['hg', 'clone'] | |
287 | if default_only: |
|
287 | if default_only: | |
288 | args.extend(['--branch', 'default']) |
|
288 | args.extend(['--branch', 'default']) | |
289 | args.append(repo_url) |
|
289 | args.append(repo_url) | |
290 | if destination: |
|
290 | if destination: | |
291 | args.append(destination) |
|
291 | args.append(destination) | |
292 | execute(args) |
|
292 | execute(args) | |
293 |
|
293 | |||
294 | @keep_cwd |
|
294 | @keep_cwd | |
295 | def add_remote(self, repo, remote_url, remote_name='upstream'): |
|
295 | def add_remote(self, repo, remote_url, remote_name='upstream'): | |
296 | self.remove_remote(repo, remote_name) |
|
296 | self.remove_remote(repo, remote_name) | |
297 | os.chdir(repo) |
|
297 | os.chdir(repo) | |
298 | hgrc = ConfigParser.RawConfigParser() |
|
298 | hgrc = ConfigParser.RawConfigParser() | |
299 | hgrc.read('.hg/hgrc') |
|
299 | hgrc.read('.hg/hgrc') | |
300 | hgrc.set('paths', remote_name, remote_url) |
|
300 | hgrc.set('paths', remote_name, remote_url) | |
301 | with open('.hg/hgrc', 'w') as f: |
|
301 | with open('.hg/hgrc', 'w') as f: | |
302 | hgrc.write(f) |
|
302 | hgrc.write(f) | |
303 |
|
303 | |||
304 | @keep_cwd |
|
304 | @keep_cwd | |
305 | def remove_remote(self, repo, remote_name='upstream'): |
|
305 | def remove_remote(self, repo, remote_name='upstream'): | |
306 | os.chdir(repo) |
|
306 | os.chdir(repo) | |
307 | hgrc = ConfigParser.RawConfigParser() |
|
307 | hgrc = ConfigParser.RawConfigParser() | |
308 | hgrc.read('.hg/hgrc') |
|
308 | hgrc.read('.hg/hgrc') | |
309 | hgrc.remove_option('paths', remote_name) |
|
309 | hgrc.remove_option('paths', remote_name) | |
310 | with open('.hg/hgrc', 'w') as f: |
|
310 | with open('.hg/hgrc', 'w') as f: | |
311 | hgrc.write(f) |
|
311 | hgrc.write(f) | |
312 |
|
312 | |||
313 | @keep_cwd |
|
313 | @keep_cwd | |
314 | def get_commits(self, repo, branch='default'): |
|
314 | def get_commits(self, repo, branch='default'): | |
315 | os.chdir(repo) |
|
315 | os.chdir(repo) | |
316 | # See http://stackoverflow.com/questions/15376649/is-there-a-mercurial-equivalent-to-git-log-first-parent |
|
316 | # See http://stackoverflow.com/questions/15376649/is-there-a-mercurial-equivalent-to-git-log-first-parent | |
317 | commits_list = execute(['hg', 'log', '--branch', branch, '--template', |
|
317 | commits_list = execute(['hg', 'log', '--branch', branch, '--template', | |
318 | '{node}\n', '--follow-first']) |
|
318 | '{node}\n', '--follow-first']) | |
319 | return commits_list.strip().split('\n')[::-1] |
|
319 | return commits_list.strip().split('\n')[::-1] | |
320 |
|
320 | |||
321 | @timed |
|
321 | @timed | |
322 | def push(self, repo, commit, remote_name=None): |
|
322 | def push(self, repo, commit, remote_name=None): | |
323 | os.chdir(repo) |
|
323 | os.chdir(repo) | |
324 | args = ['hg', 'push', '--rev', commit, '--new-branch'] |
|
324 | args = ['hg', 'push', '--rev', commit, '--new-branch'] | |
325 | if remote_name: |
|
325 | if remote_name: | |
326 | args.append(remote_name) |
|
326 | args.append(remote_name) | |
327 | execute(args) |
|
327 | execute(args) | |
328 |
|
328 | |||
329 | @timed |
|
329 | @timed | |
330 | def pull(self, repo): |
|
330 | def pull(self, repo): | |
331 | os.chdir(repo) |
|
331 | os.chdir(repo) | |
332 | execute(['hg', '--config', 'alias.pull=pull', 'pull', '-u']) |
|
332 | execute(['hg', '--config', 'alias.pull=pull', 'pull', '-u']) | |
333 |
|
333 | |||
334 | def _remote_last_commit(self, repo_url): |
|
334 | def _remote_last_commit(self, repo_url): | |
335 | return execute(['hg', 'identify', repo_url])[:12] |
|
335 | return execute(['hg', 'identify', repo_url])[:12] | |
336 |
|
336 | |||
337 | def check_remote_last_commit_is(self, commit, repo_url): |
|
337 | def check_remote_last_commit_is(self, commit, repo_url): | |
338 | last_remote_commit = self._remote_last_commit(repo_url) |
|
338 | last_remote_commit = self._remote_last_commit(repo_url) | |
339 | if not commit.startswith(last_remote_commit): |
|
339 | if not commit.startswith(last_remote_commit): | |
340 | raise Exception('Push did not work, expected commit %s but got %s' % |
|
340 | raise Exception('Push did not work, expected commit %s but got %s' % | |
341 | (commit, last_remote_commit)) |
|
341 | (commit, last_remote_commit)) | |
342 |
|
342 | |||
343 | @keep_cwd |
|
343 | @keep_cwd | |
344 | def _local_last_commit(self, repo): |
|
344 | def _local_last_commit(self, repo): | |
345 | os.chdir(repo) |
|
345 | os.chdir(repo) | |
346 | return execute(['hg', 'identify'])[:12] |
|
346 | return execute(['hg', 'identify'])[:12] | |
347 |
|
347 | |||
348 | def check_local_last_commit_is(self, commit, repo): |
|
348 | def check_local_last_commit_is(self, commit, repo): | |
349 | last_local_commit = self._local_last_commit(repo) |
|
349 | last_local_commit = self._local_last_commit(repo) | |
350 | if not commit.startswith(last_local_commit): |
|
350 | if not commit.startswith(last_local_commit): | |
351 | raise Exception('Pull did not work, expected commit %s but got %s' % |
|
351 | raise Exception('Pull did not work, expected commit %s but got %s' % | |
352 | (commit, last_local_commit)) |
|
352 | (commit, last_local_commit)) | |
353 |
|
353 | |||
354 |
|
354 | |||
355 | class GitTestPerformance(GitMixin, RhodeCodeMixin, TestPerformanceBase): |
|
355 | class GitTestPerformance(GitMixin, RhodeCodeMixin, TestPerformanceBase): | |
356 | def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits, |
|
356 | def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits, | |
357 | api_key): |
|
357 | api_key): | |
358 | TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits, |
|
358 | TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits, | |
359 | max_commits, skip_commits) |
|
359 | max_commits, skip_commits) | |
360 | RhodeCodeMixin.__init__(self, api_key) |
|
360 | RhodeCodeMixin.__init__(self, api_key) | |
361 | self.repo_type = 'git' |
|
361 | self.repo_type = 'git' | |
362 |
|
362 | |||
363 |
|
363 | |||
364 | class HgTestPerformance(HgMixin, RhodeCodeMixin, TestPerformanceBase): |
|
364 | class HgTestPerformance(HgMixin, RhodeCodeMixin, TestPerformanceBase): | |
365 | def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits, |
|
365 | def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits, | |
366 | api_key): |
|
366 | api_key): | |
367 | TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits, |
|
367 | TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits, | |
368 | max_commits, skip_commits) |
|
368 | max_commits, skip_commits) | |
369 | RhodeCodeMixin.__init__(self, api_key) |
|
369 | RhodeCodeMixin.__init__(self, api_key) | |
370 | self.repo_type = 'hg' |
|
370 | self.repo_type = 'hg' | |
371 |
|
371 | |||
372 |
|
372 | |||
373 | def get_test(base_dir, repo_url, repo_type, step, max_commits, skip_commits, |
|
373 | def get_test(base_dir, repo_url, repo_type, step, max_commits, skip_commits, | |
374 | api_key): |
|
374 | api_key): | |
375 | max_commits = min(10 * step, |
|
375 | max_commits = min(10 * step, | |
376 | int((max_commits - skip_commits) / step) * step) |
|
376 | int((max_commits - skip_commits) / step) * step) | |
377 | max_commits += skip_commits |
|
377 | max_commits += skip_commits | |
378 | if repo_type == 'git': |
|
378 | if repo_type == 'git': | |
379 | return GitTestPerformance( |
|
379 | return GitTestPerformance( | |
380 | base_dir, repo_url, step, max_commits, skip_commits, api_key) |
|
380 | base_dir, repo_url, step, max_commits, skip_commits, api_key) | |
381 | elif repo_type == 'hg': |
|
381 | elif repo_type == 'hg': | |
382 | return HgTestPerformance( |
|
382 | return HgTestPerformance( | |
383 | base_dir, repo_url, step, max_commits, skip_commits, api_key) |
|
383 | base_dir, repo_url, step, max_commits, skip_commits, api_key) | |
384 |
|
384 | |||
385 |
|
385 | |||
386 | def main(argv): |
|
386 | def main(argv): | |
387 | parser = argparse.ArgumentParser( |
|
387 | parser = argparse.ArgumentParser( | |
388 | description='Performance tests for push/pull/clone for git and ' + |
|
388 | description='Performance tests for push/pull/clone for git and ' + | |
389 | 'mercurial repos.') |
|
389 | 'mercurial repos.') | |
390 | parser.add_argument( |
|
390 | parser.add_argument( | |
391 | '--tests', dest='tests', action='store', required=False, default='all', |
|
391 | '--tests', dest='tests', action='store', required=False, default='all', | |
392 | help='The tests to run. Default: all. But could be any comma ' + |
|
392 | help='The tests to run. Default: all. But could be any comma ' + | |
393 | 'separated list with python, hg, kernel or git') |
|
393 | 'separated list with python, hg, kernel or git') | |
394 | parser.add_argument( |
|
394 | parser.add_argument( | |
395 | '--sizes', dest='sizes', action='store', required=False, |
|
395 | '--sizes', dest='sizes', action='store', required=False, | |
396 | default='1,10,100,1000,2500', |
|
396 | default='1,10,100,1000,2500', | |
397 | help='The sizes to use. Default: 1,10,100,1000,2500') |
|
397 | help='The sizes to use. Default: 1,10,100,1000,2500') | |
398 | parser.add_argument( |
|
398 | parser.add_argument( | |
399 | '--dir', dest='dir', action='store', required=True, |
|
399 | '--dir', dest='dir', action='store', required=True, | |
400 | help='The dir where to store the repos') |
|
400 | help='The dir where to store the repos') | |
401 | parser.add_argument( |
|
401 | parser.add_argument( | |
402 | '--api-key', dest='api_key', action='store', required=True, |
|
402 | '--api-key', dest='api_key', action='store', required=True, | |
403 | help='The api key of RhodeCode') |
|
403 | help='The api key of RhodeCode') | |
404 | options = parser.parse_args(argv[1:]) |
|
404 | options = parser.parse_args(argv[1:]) | |
405 | print options |
|
405 | print(options) | |
406 |
|
406 | |||
407 | test_config = { |
|
407 | test_config = { | |
408 | 'python': { |
|
408 | 'python': { | |
409 | 'url': 'https://hg.python.org/cpython/', |
|
409 | 'url': 'https://hg.python.org/cpython/', | |
410 | 'limit': 23322, |
|
410 | 'limit': 23322, | |
411 | 'type': 'hg', |
|
411 | 'type': 'hg', | |
412 | # Do not time the first commit, as it is HUGE! |
|
412 | # Do not time the first commit, as it is HUGE! | |
413 | 'skip': 1, |
|
413 | 'skip': 1, | |
414 | }, |
|
414 | }, | |
415 | 'hg': { |
|
415 | 'hg': { | |
416 | 'url': 'http://selenic.com/hg', |
|
416 | 'url': 'http://selenic.com/hg', | |
417 | 'limit': 14396, |
|
417 | 'limit': 14396, | |
418 | 'type': 'hg', |
|
418 | 'type': 'hg', | |
419 | }, |
|
419 | }, | |
420 | 'kernel': { |
|
420 | 'kernel': { | |
421 | 'url': 'https://github.com/torvalds/linux.git', |
|
421 | 'url': 'https://github.com/torvalds/linux.git', | |
422 | 'limit': 46271, |
|
422 | 'limit': 46271, | |
423 | 'type': 'git', |
|
423 | 'type': 'git', | |
424 | }, |
|
424 | }, | |
425 | 'git': { |
|
425 | 'git': { | |
426 | 'url': 'https://github.com/git/git.git', |
|
426 | 'url': 'https://github.com/git/git.git', | |
427 | 'limit': 13525, |
|
427 | 'limit': 13525, | |
428 | 'type': 'git', |
|
428 | 'type': 'git', | |
429 | } |
|
429 | } | |
430 |
|
430 | |||
431 | } |
|
431 | } | |
432 |
|
432 | |||
433 | test_names = options.tests.split(',') |
|
433 | test_names = options.tests.split(',') | |
434 | if test_names == ['all']: |
|
434 | if test_names == ['all']: | |
435 | test_names = test_config.keys() |
|
435 | test_names = test_config.keys() | |
436 | if not set(test_names) <= set(test_config.keys()): |
|
436 | if not set(test_names) <= set(test_config.keys()): | |
437 | print ('Invalid tests: only %s are valid but specified %s' % |
|
437 | print('Invalid tests: only %s are valid but specified %s' % | |
438 | (test_config.keys(), test_names)) |
|
438 | (test_config.keys(), test_names)) | |
439 | return 1 |
|
439 | return 1 | |
440 |
|
440 | |||
441 | sizes = options.sizes.split(',') |
|
441 | sizes = options.sizes.split(',') | |
442 | sizes = map(int, sizes) |
|
442 | sizes = map(int, sizes) | |
443 |
|
443 | |||
444 | base_dir = options.dir |
|
444 | base_dir = options.dir | |
445 | api_key = options.api_key |
|
445 | api_key = options.api_key | |
446 | results = collections.defaultdict(dict) |
|
446 | results = collections.defaultdict(dict) | |
447 | for test_name, size in itertools.product(test_names, sizes): |
|
447 | for test_name, size in itertools.product(test_names, sizes): | |
448 | test = get_test(base_dir, |
|
448 | test = get_test(base_dir, | |
449 | test_config[test_name]['url'], |
|
449 | test_config[test_name]['url'], | |
450 | test_config[test_name]['type'], |
|
450 | test_config[test_name]['type'], | |
451 | size, |
|
451 | size, | |
452 | test_config[test_name]['limit'], |
|
452 | test_config[test_name]['limit'], | |
453 | test_config[test_name].get('skip', 0), |
|
453 | test_config[test_name].get('skip', 0), | |
454 | api_key) |
|
454 | api_key) | |
455 | print '*' * 80 |
|
455 | print('*' * 80) | |
456 | print 'Running performance test: %s with size %d' % (test_name, size) |
|
456 | print('Running performance test: %s with size %d' % (test_name, size)) | |
457 | print '*' * 80 |
|
457 | print('*' * 80) | |
458 | results[test_name][size] = test.run() |
|
458 | results[test_name][size] = test.run() | |
459 | pprint.pprint(dict(results)) |
|
459 | pprint.pprint(dict(results)) | |
460 |
|
460 | |||
461 |
|
461 | |||
462 | if __name__ == '__main__': |
|
462 | if __name__ == '__main__': | |
463 | sys.exit(main(sys.argv)) |
|
463 | sys.exit(main(sys.argv)) |
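One detail of the performance script worth seeing in isolation: because timed() places its return inside a finally block, the decorated push/pull/clone calls always return a float duration, and a return in finally replaces any in-flight exception, so failures in the wrapped function surface only as timings. A standalone illustration of the decorator exactly as defined above:

import functools
import time

def timed(f):
    """Decorator that returns the time it took to execute the function."""
    @functools.wraps(f)
    def wrapped_f(*args, **kwargs):
        start_time = time.time()
        try:
            f(*args, **kwargs)
        finally:
            # Returning from finally discards any exception raised by f;
            # only the elapsed wall-clock time survives.
            return time.time() - start_time
    return wrapped_f

@timed
def flaky():
    raise RuntimeError('swallowed')

elapsed = flaky()  # no exception escapes; elapsed is a small float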
@@ -1,135 +1,135 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Utility to gather certain statistics about a process. |
|
22 | Utility to gather certain statistics about a process. | |
23 |
|
23 | |||
24 | Used to generate data about the memory consumption of the vcsserver. It is |
|
24 | Used to generate data about the memory consumption of the vcsserver. It is | |
25 | quite generic and should work for every process. Use the parameter `--help` |
|
25 | quite generic and should work for every process. Use the parameter `--help` | |
26 | to see all options. |
|
26 | to see all options. | |
27 |
|
27 | |||
28 | Example call:: |
|
28 | Example call:: | |
29 |
|
29 | |||
30 | python profile-mem.py --pid=89816 --ae --ae-key=YOUR_API_KEY |
|
30 | python profile-mem.py --pid=89816 --ae --ae-key=YOUR_API_KEY | |
31 |
|
31 | |||
32 | """ |
|
32 | """ | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | import argparse |
|
35 | import argparse | |
36 | import json |
|
36 | import json | |
37 | import sys |
|
37 | import sys | |
38 | import time |
|
38 | import time | |
39 |
|
39 | |||
40 | import datetime |
|
40 | import datetime | |
41 | import requests |
|
41 | import requests | |
42 | import psutil |
|
42 | import psutil | |
43 |
|
43 | |||
44 | import logging |
|
44 | import logging | |
45 | import socket |
|
45 | import socket | |
46 | logging.basicConfig(level=logging.DEBUG) |
|
46 | logging.basicConfig(level=logging.DEBUG) | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | def profile(): |
|
49 | def profile(): | |
50 | config = parse_options() |
|
50 | config = parse_options() | |
51 | try: |
|
51 | try: | |
52 | process = psutil.Process(config.pid) |
|
52 | process = psutil.Process(config.pid) | |
53 | except psutil.NoSuchProcess: |
|
53 | except psutil.NoSuchProcess: | |
54 | print "Process {pid} does not exist!".format(pid=config.pid) |
|
54 | print("Process {pid} does not exist!".format(pid=config.pid)) | |
55 | sys.exit(1) |
|
55 | sys.exit(1) | |
56 |
|
56 | |||
57 | while True: |
|
57 | while True: | |
58 | stats = process_stats(process) |
|
58 | stats = process_stats(process) | |
59 | dump_stats(stats) |
|
59 | dump_stats(stats) | |
60 | if config.appenlight: |
|
60 | if config.appenlight: | |
61 | client = AppenlightClient( |
|
61 | client = AppenlightClient( | |
62 | url=config.appenlight_url, |
|
62 | url=config.appenlight_url, | |
63 | api_key=config.appenlight_api_key) |
|
63 | api_key=config.appenlight_api_key) | |
64 | client.dump_stats(stats) |
|
64 | client.dump_stats(stats) | |
65 | time.sleep(config.interval) |
|
65 | time.sleep(config.interval) | |
66 |
|
66 | |||
67 |
|
67 | |||
68 | def parse_options(): |
|
68 | def parse_options(): | |
69 | parser = argparse.ArgumentParser( |
|
69 | parser = argparse.ArgumentParser( | |
70 | description=__doc__) |
|
70 | description=__doc__) | |
71 | parser.add_argument( |
|
71 | parser.add_argument( | |
72 | '--pid', required=True, type=int, |
|
72 | '--pid', required=True, type=int, | |
73 | help="Process ID to monitor.") |
|
73 | help="Process ID to monitor.") | |
74 | parser.add_argument( |
|
74 | parser.add_argument( | |
75 | '--interval', '-i', type=float, default=5, |
|
75 | '--interval', '-i', type=float, default=5, | |
76 | help="Interval in seconds.") |
|
76 | help="Interval in seconds.") | |
77 | parser.add_argument( |
|
77 | parser.add_argument( | |
78 | '--appenlight', '--ae', action='store_true') |
|
78 | '--appenlight', '--ae', action='store_true') | |
79 | parser.add_argument( |
|
79 | parser.add_argument( | |
80 | '--appenlight-url', '--ae-url', |
|
80 | '--appenlight-url', '--ae-url', | |
81 | default='https://ae.rhodecode.com/api/logs', |
|
81 | default='https://ae.rhodecode.com/api/logs', | |
82 | help='URL of the Appenlight API endpoint, defaults to "%(default)s".') |
|
82 | help='URL of the Appenlight API endpoint, defaults to "%(default)s".') | |
83 | parser.add_argument( |
|
83 | parser.add_argument( | |
84 | '--appenlight-api-key', '--ae-key', |
|
84 | '--appenlight-api-key', '--ae-key', | |
85 | help='API key to use when sending data to appenlight. This has to be ' |
|
85 | help='API key to use when sending data to appenlight. This has to be ' | |
86 | 'set if Appenlight is enabled.') |
|
86 | 'set if Appenlight is enabled.') | |
87 | return parser.parse_args() |
|
87 | return parser.parse_args() | |
88 |
|
88 | |||
89 |
|
89 | |||
90 | def process_stats(process): |
|
90 | def process_stats(process): | |
91 | mem = process.memory_info() |
|
91 | mem = process.memory_info() | |
92 | iso_now = datetime.datetime.utcnow().isoformat() |
|
92 | iso_now = datetime.datetime.utcnow().isoformat() | |
93 | stats = [ |
|
93 | stats = [ | |
94 | {'message': 'Memory stats of process {pid}'.format(pid=process.pid), |
|
94 | {'message': 'Memory stats of process {pid}'.format(pid=process.pid), | |
95 | 'namespace': 'process.{pid}'.format(pid=process.pid), |
|
95 | 'namespace': 'process.{pid}'.format(pid=process.pid), | |
96 | 'server': socket.getfqdn(socket.gethostname()), |
|
96 | 'server': socket.getfqdn(socket.gethostname()), | |
97 | 'tags': [ |
|
97 | 'tags': [ | |
98 | ['rss', mem.rss], |
|
98 | ['rss', mem.rss], | |
99 | ['vms', mem.vms]], |
|
99 | ['vms', mem.vms]], | |
100 | 'date': iso_now, |
|
100 | 'date': iso_now, | |
101 | }, |
|
101 | }, | |
102 | ] |
|
102 | ] | |
103 | return stats |
|
103 | return stats | |
104 |
|
104 | |||
105 |
|
105 | |||
106 | def dump_stats(stats): |
|
106 | def dump_stats(stats): | |
107 | for sample in stats: |
|
107 | for sample in stats: | |
108 | print json.dumps(sample) |
|
108 | print(json.dumps(sample)) | |
109 |
|
109 | |||
110 |
|
110 | |||
111 | class AppenlightClient(): |
|
111 | class AppenlightClient(): | |
112 |
|
112 | |||
113 | url_template = '{url}?protocol_version=0.5' |
|
113 | url_template = '{url}?protocol_version=0.5' | |
114 |
|
114 | |||
115 | def __init__(self, url, api_key): |
|
115 | def __init__(self, url, api_key): | |
116 | self.url = self.url_template.format(url=url) |
|
116 | self.url = self.url_template.format(url=url) | |
117 | self.api_key = api_key |
|
117 | self.api_key = api_key | |
118 |
|
118 | |||
119 | def dump_stats(self, stats): |
|
119 | def dump_stats(self, stats): | |
120 | response = requests.post( |
|
120 | response = requests.post( | |
121 | self.url, |
|
121 | self.url, | |
122 | headers={ |
|
122 | headers={ | |
123 | 'X-appenlight-api-key': self.api_key}, |
|
123 | 'X-appenlight-api-key': self.api_key}, | |
124 | data=json.dumps(stats)) |
|
124 | data=json.dumps(stats)) | |
125 | if not response.status_code == 200: |
|
125 | if not response.status_code == 200: | |
126 | logging.error( |
|
126 | logging.error( | |
127 | 'Sending to appenlight failed\n%s\n%s', |
|
127 | 'Sending to appenlight failed\n%s\n%s', | |
128 | response.headers, response.text) |
|
128 | response.headers, response.text) | |
129 |
|
129 | |||
130 |
|
130 | |||
131 | if __name__ == '__main__': |
|
131 | if __name__ == '__main__': | |
132 | try: |
|
132 | try: | |
133 | profile() |
|
133 | profile() | |
134 | except KeyboardInterrupt: |
|
134 | except KeyboardInterrupt: | |
135 | pass |
|
135 | pass |
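
Reviewer note: each sample built by process_stats above is a plain dict (message, namespace, server, tags, date) serialized with json.dumps. The same psutil calls can be tried standalone by sampling the current process instead of --pid; a minimal sketch::

    import datetime
    import json
    import os
    import socket

    import psutil  # third-party, the same dependency the script uses

    process = psutil.Process(os.getpid())  # self-inspection instead of --pid
    mem = process.memory_info()
    sample = {
        'message': 'Memory stats of process {pid}'.format(pid=process.pid),
        'namespace': 'process.{pid}'.format(pid=process.pid),
        'server': socket.getfqdn(socket.gethostname()),
        'tags': [['rss', mem.rss], ['vms', mem.vms]],
        'date': datetime.datetime.utcnow().isoformat(),
    }
    print(json.dumps(sample))
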
@@ -1,155 +1,155 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | This is a standalone script which will start VCS and RC. |
|
22 | This is a standalone script which will start VCS and RC. | |
23 |
|
23 | |||
24 | Performance numbers will be written on each interval to: |
|
24 | Performance numbers will be written on each interval to: | |
25 | vcs_profileX.csv |
|
25 | vcs_profileX.csv | |
26 | rc_profileX.csv |
|
26 | rc_profileX.csv | |
27 |
|
27 | |||
28 | To stop the script, press Ctrl-C |
|
28 | To stop the script, press Ctrl-C | |
29 | """ |
|
29 | """ | |
30 |
|
30 | |||
31 | import datetime |
|
31 | import datetime | |
32 | import os |
|
32 | import os | |
33 | import psutil |
|
33 | import psutil | |
34 | import subprocess32 |
|
34 | import subprocess32 | |
35 | import sys |
|
35 | import sys | |
36 | import time |
|
36 | import time | |
37 | import traceback |
|
37 | import traceback | |
38 | import urllib |
|
38 | import urllib | |
39 |
|
39 | |||
40 | PROFILING_INTERVAL = 5 |
|
40 | PROFILING_INTERVAL = 5 | |
41 | RC_WEBSITE = "http://localhost:5001/" |
|
41 | RC_WEBSITE = "http://localhost:5001/" | |
42 |
|
42 | |||
43 |
|
43 | |||
44 | def get_file(prefix): |
|
44 | def get_file(prefix): | |
45 | out_file = None |
|
45 | out_file = None | |
46 | for i in xrange(100): |
|
46 | for i in range(100): | |
47 | file_path = "%s_profile%.3d.csv" % (prefix, i) |
|
47 | file_path = "%s_profile%.3d.csv" % (prefix, i) | |
48 | if os.path.exists(file_path): |
|
48 | if os.path.exists(file_path): | |
49 | continue |
|
49 | continue | |
50 | out_file = open(file_path, "w") |
|
50 | out_file = open(file_path, "w") | |
51 | out_file.write("Time; CPU %; Memory (MB); Total FDs; Dulwich FDs; Threads\n") |
|
51 | out_file.write("Time; CPU %; Memory (MB); Total FDs; Dulwich FDs; Threads\n") | |
52 | break |
|
52 | break | |
53 | return out_file |
|
53 | return out_file | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | def dump_system(): |
|
56 | def dump_system(): | |
57 | print "System Overview..." |
|
57 | print("System Overview...") | |
58 | print "\nCPU Count: %d (%d real)" % \ |
|
58 | print("\nCPU Count: %d (%d real)" % | |
59 | (psutil.cpu_count(), psutil.cpu_count(logical=False)) |
|
59 | (psutil.cpu_count(), psutil.cpu_count(logical=False))) | |
60 | print "\nDisk:" |
|
60 | print("\nDisk:") | |
61 | print psutil.disk_usage(os.sep) |
|
61 | print(psutil.disk_usage(os.sep)) | |
62 | print "\nMemory:" |
|
62 | print("\nMemory:") | |
63 | print psutil.virtual_memory() |
|
63 | print(psutil.virtual_memory()) | |
64 | print "\nMemory (swap):" |
|
64 | print("\nMemory (swap):") | |
65 | print psutil.swap_memory() |
|
65 | print(psutil.swap_memory()) | |
66 |
|
66 | |||
67 |
|
67 | |||
68 | def count_dulwich_fds(proc): |
|
68 | def count_dulwich_fds(proc): | |
69 | p = subprocess32.Popen(["lsof", "-p", proc.pid], stdout=subprocess32.PIPE) |
|
69 | p = subprocess32.Popen(["lsof", "-p", proc.pid], stdout=subprocess32.PIPE) | |
70 | out, err = p.communicate() |
|
70 | out, err = p.communicate() | |
71 |
|
71 | |||
72 | count = 0 |
|
72 | count = 0 | |
73 | for line in out.splitlines(): |
|
73 | for line in out.splitlines(): | |
74 | content = line.split() |
|
74 | content = line.split() | |
75 | # http://git-scm.com/book/en/Git-Internals-Packfiles |
|
75 | # http://git-scm.com/book/en/Git-Internals-Packfiles | |
76 | if content[-1].endswith(".idx"): |
|
76 | if content[-1].endswith(".idx"): | |
77 | count += 1 |
|
77 | count += 1 | |
78 |
|
78 | |||
79 | return count |
|
79 | return count | |
80 |
|
80 | |||
81 | def dump_process(pid, out_file): |
|
81 | def dump_process(pid, out_file): | |
82 | now = datetime.datetime.now() |
|
82 | now = datetime.datetime.now() | |
83 | cpu = pid.cpu_percent() |
|
83 | cpu = pid.cpu_percent() | |
84 | mem = pid.memory_info() |
|
84 | mem = pid.memory_info() | |
85 | fds = pid.num_fds() |
|
85 | fds = pid.num_fds() | |
86 | dulwich_fds = count_dulwich_fds(pid) |
|
86 | dulwich_fds = count_dulwich_fds(pid) | |
87 | threads = pid.num_threads() |
|
87 | threads = pid.num_threads() | |
88 |
|
88 | |||
89 | content = [now.strftime('%m/%d/%y %H:%M:%S'), |
|
89 | content = [now.strftime('%m/%d/%y %H:%M:%S'), | |
90 | cpu, |
|
90 | cpu, | |
91 | "%.2f" % (mem[0]/1024.0/1024.0), |
|
91 | "%.2f" % (mem[0]/1024.0/1024.0), | |
92 | fds, dulwich_fds, threads] |
|
92 | fds, dulwich_fds, threads] | |
93 | out_file.write("; ".join([str(item) for item in content])) |
|
93 | out_file.write("; ".join([str(item) for item in content])) | |
94 | out_file.write("\n") |
|
94 | out_file.write("\n") | |
95 |
|
95 | |||
96 |
|
96 | |||
97 | # Open output files |
|
97 | # Open output files | |
98 | vcs_out = get_file("vcs") |
|
98 | vcs_out = get_file("vcs") | |
99 | if vcs_out is None: |
|
99 | if vcs_out is None: | |
100 | print "Unable to enumerate output file for VCS" |
|
100 | print("Unable to enumerate output file for VCS") | |
101 | sys.exit(1) |
|
101 | sys.exit(1) | |
102 | rc_out = get_file("rc") |
|
102 | rc_out = get_file("rc") | |
103 | if rc_out is None: |
|
103 | if rc_out is None: | |
104 | print "Unable to enumerate output file for RC" |
|
104 | print("Unable to enumerate output file for RC") | |
105 | sys.exit(1) |
|
105 | sys.exit(1) | |
106 |
|
106 | |||
107 | # Show system information |
|
107 | # Show system information | |
108 | dump_system() |
|
108 | dump_system() | |
109 |
|
109 | |||
110 | print "\nStarting VCS..." |
|
110 | print("\nStarting VCS...") | |
111 | vcs = psutil.Popen(["vcsserver"]) |
|
111 | vcs = psutil.Popen(["vcsserver"]) | |
112 | time.sleep(1) |
|
112 | time.sleep(1) | |
113 | if not vcs.is_running(): |
|
113 | if not vcs.is_running(): | |
114 | print "VCS - Failed to start" |
|
114 | print("VCS - Failed to start") | |
115 | sys.exit(1) |
|
115 | sys.exit(1) | |
116 | print "VCS - Ok" |
|
116 | print("VCS - Ok") | |
117 |
|
117 | |||
118 | print "\nStarting RhodeCode..." |
|
118 | print("\nStarting RhodeCode...") | |
119 | rc = psutil.Popen("RC_VCSSERVER_TEST_DISABLE=1 paster serve test.ini", |
|
119 | rc = psutil.Popen("RC_VCSSERVER_TEST_DISABLE=1 paster serve test.ini", | |
120 | shell=True, stdin=subprocess32.PIPE) |
|
120 | shell=True, stdin=subprocess32.PIPE) | |
121 | time.sleep(1) |
|
121 | time.sleep(1) | |
122 | if not rc.is_running(): |
|
122 | if not rc.is_running(): | |
123 | print "RC - Failed to start" |
|
123 | print("RC - Failed to start") | |
124 | vcs.terminate() |
|
124 | vcs.terminate() | |
125 | sys.exit(1) |
|
125 | sys.exit(1) | |
126 |
|
126 | |||
127 | # Send command to create the databases |
|
127 | # Send command to create the databases | |
128 | rc.stdin.write("y\n") |
|
128 | rc.stdin.write("y\n") | |
129 |
|
129 | |||
130 | # Verify that the website is up |
|
130 | # Verify that the website is up | |
131 | time.sleep(4) |
|
131 | time.sleep(4) | |
132 | try: |
|
132 | try: | |
133 | urllib.urlopen(RC_WEBSITE) |
|
133 | urllib.urlopen(RC_WEBSITE) | |
134 | except IOError: |
|
134 | except IOError: | |
135 | print "RC - Website not started" |
|
135 | print("RC - Website not started") | |
136 | vcs.terminate() |
|
136 | vcs.terminate() | |
137 | sys.exit(1) |
|
137 | sys.exit(1) | |
138 | print "RC - Ok" |
|
138 | print("RC - Ok") | |
139 |
|
139 | |||
140 | print "\nProfiling...\n%s\n" % ("-"*80) |
|
140 | print("\nProfiling...\n%s\n" % ("-"*80)) | |
141 | while True: |
|
141 | while True: | |
142 | try: |
|
142 | try: | |
143 | dump_process(vcs, vcs_out) |
|
143 | dump_process(vcs, vcs_out) | |
144 | dump_process(rc, rc_out) |
|
144 | dump_process(rc, rc_out) | |
145 | time.sleep(PROFILING_INTERVAL) |
|
145 | time.sleep(PROFILING_INTERVAL) | |
146 | except Exception: |
|
146 | except Exception: | |
147 | print traceback.format_exc() |
|
147 | print(traceback.format_exc()) | |
148 | break |
|
148 | break | |
149 |
|
149 | |||
150 | # Finalize the profiling |
|
150 | # Finalize the profiling | |
151 | vcs_out.close() |
|
151 | vcs_out.close() | |
152 | rc_out.close() |
|
152 | rc_out.close() | |
153 |
|
153 | |||
154 | vcs.terminate() |
|
154 | vcs.terminate() | |
155 | rc.terminate() |
|
155 | rc.terminate() |
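
Reviewer note: count_dulwich_fds above estimates Dulwich's open packfile handles by shelling out to lsof and counting lines whose last column ends in ".idx". That parsing step can be exercised in isolation with canned lsof-style text (the sample lines below are fabricated for illustration)::

    def count_idx_files(lsof_output):
        # lsof prints the file path in the last column; git packfile
        # indexes end in ".idx" (see Git-Internals-Packfiles).
        count = 0
        for line in lsof_output.splitlines():
            content = line.split()
            if content and content[-1].endswith(".idx"):
                count += 1
        return count

    fake_lsof = (
        "python 123 user 5r REG 8,1 42 /repo/.git/objects/pack/pack-ab.idx\n"
        "python 123 user 6r REG 8,1 42 /repo/.git/objects/pack/pack-ab.pack\n"
    )
    assert count_idx_files(fake_lsof) == 1
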
@@ -1,69 +1,69 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import timeit |
|
21 | import timeit | |
22 |
|
22 | |||
23 | server = "localhost:5000" |
|
23 | server = "localhost:5000" | |
24 |
|
24 | |||
25 | pages = [ |
|
25 | pages = [ | |
26 | "cpython", |
|
26 | "cpython", | |
27 | "cpython/annotate/74236c8bf064188516b32bf95016971227ec72a9/Makefile.pre.in", |
|
27 | "cpython/annotate/74236c8bf064188516b32bf95016971227ec72a9/Makefile.pre.in", | |
28 | "cpython/changelog", |
|
28 | "cpython/changelog", | |
29 | "cpython/changeset/e0f681f4ade3af52915d5f32daac97ada580d71a", |
|
29 | "cpython/changeset/e0f681f4ade3af52915d5f32daac97ada580d71a", | |
30 | "cpython/compare/tag@v3.4.1rc1...tag@v3.4.1?target_repo=cpython", |
|
30 | "cpython/compare/tag@v3.4.1rc1...tag@v3.4.1?target_repo=cpython", | |
31 | "cpython/files/tip/", |
|
31 | "cpython/files/tip/", | |
32 | "cpython/files/74236c8bf064188516b32bf95016971227ec72a9/Grammar", |
|
32 | "cpython/files/74236c8bf064188516b32bf95016971227ec72a9/Grammar", | |
33 | "", |
|
33 | "", | |
34 | "git", |
|
34 | "git", | |
35 | "git/annotate/6c4ab27f2378ce67940b4496365043119d7ffff2/gitk-git/.gitignore", |
|
35 | "git/annotate/6c4ab27f2378ce67940b4496365043119d7ffff2/gitk-git/.gitignore", | |
36 | "git/changelog", |
|
36 | "git/changelog", | |
37 | "git/changeset/d299e9e550c1bf8640907fdba1f03cc585ee71df", |
|
37 | "git/changeset/d299e9e550c1bf8640907fdba1f03cc585ee71df", | |
38 | "git/compare/rev@1200...rev@1300?target_repo=git", |
|
38 | "git/compare/rev@1200...rev@1300?target_repo=git", | |
39 | "git/files/tip/", |
|
39 | "git/files/tip/", | |
40 | "git/files/6c4ab27f2378ce67940b4496365043119d7ffff2/.gitignore" |
|
40 | "git/files/6c4ab27f2378ce67940b4496365043119d7ffff2/.gitignore" | |
41 | ] |
|
41 | ] | |
42 |
|
42 | |||
43 | svn_pages = [ |
|
43 | svn_pages = [ | |
44 | "svn-apache", |
|
44 | "svn-apache", | |
45 | "svn-apache/annotate/672129/cocoon/trunk/README.txt", |
|
45 | "svn-apache/annotate/672129/cocoon/trunk/README.txt", | |
46 | "svn-apache/changelog", |
|
46 | "svn-apache/changelog", | |
47 | "svn-apache/changeset/1164362", |
|
47 | "svn-apache/changeset/1164362", | |
48 | "svn-apache/compare/rev@1164350...rev@1164360?target_repo=svn-apache", |
|
48 | "svn-apache/compare/rev@1164350...rev@1164360?target_repo=svn-apache", | |
49 | "svn-apache/compare/rev@1164300...rev@1164360?target_repo=svn-apache", |
|
49 | "svn-apache/compare/rev@1164300...rev@1164360?target_repo=svn-apache", | |
50 | "svn-apache/files/tip/", |
|
50 | "svn-apache/files/tip/", | |
51 | "svn-apache/files/1164363/cocoon/trunk/README.txt", |
|
51 | "svn-apache/files/1164363/cocoon/trunk/README.txt", | |
52 | ] |
|
52 | ] | |
53 |
|
53 | |||
54 | # Uncomment to check also svn performance |
|
54 | # Uncomment to check also svn performance | |
55 | # pages = pages + svn_pages |
|
55 | # pages = pages + svn_pages | |
56 |
|
56 | |||
57 | repeat = 10 |
|
57 | repeat = 10 | |
58 |
|
58 | |||
59 | print "Repeating each URL x%d\n" % repeat |
|
59 | print("Repeating each URL x%d\n" % repeat) | |
60 | for page in pages: |
|
60 | for page in pages: | |
61 | url = "http://%s/%s" % (server, page) |
|
61 | url = "http://%s/%s" % (server, page) | |
62 | print url |
|
62 | print(url) | |
63 |
|
63 | |||
64 | stmt = "urllib2.urlopen('%s', timeout=120)" % url |
|
64 | stmt = "urllib2.urlopen('%s', timeout=120)" % url | |
65 | t = timeit.Timer(stmt=stmt, setup="import urllib2") |
|
65 | t = timeit.Timer(stmt=stmt, setup="import urllib2") | |
66 |
|
66 | |||
67 | result = t.repeat(repeat=repeat, number=1) |
|
67 | result = t.repeat(repeat=repeat, number=1) | |
68 | print "\t%.3f (min) - %.3f (max) - %.3f (avg)\n" % \ |
|
68 | print("\t%.3f (min) - %.3f (max) - %.3f (avg)\n" % | |
69 | (min(result), max(result), sum(result)/len(result)) |
|
69 | (min(result), max(result), sum(result)/len(result))) | |
@@ -1,187 +1,187 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Test for crawling a project for memory usage |
|
22 | Test for crawling a project for memory usage | |
23 | This should be run just as a regular script together |
|
23 | This should be run just as a regular script together | |
24 | with a watch script that will show memory usage. |
|
24 | with a watch script that will show memory usage. | |
25 |
|
25 | |||
26 | watch -n1 ./rhodecode/tests/mem_watch |
|
26 | watch -n1 ./rhodecode/tests/mem_watch | |
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 |
|
29 | |||
30 | import cookielib |
|
30 | import cookielib | |
31 | import urllib |
|
31 | import urllib | |
32 | import urllib2 |
|
32 | import urllib2 | |
33 | import time |
|
33 | import time | |
34 | import os |
|
34 | import os | |
35 | import sys |
|
35 | import sys | |
36 | from os.path import join as jn |
|
36 | from os.path import join as jn | |
37 | from os.path import dirname as dn |
|
37 | from os.path import dirname as dn | |
38 |
|
38 | |||
39 | from sqlalchemy.util import OrderedSet |
|
39 | from sqlalchemy.util import OrderedSet | |
40 |
|
40 | |||
41 | __here__ = os.path.abspath(__file__) |
|
41 | __here__ = os.path.abspath(__file__) | |
42 | __root__ = dn(dn(dn(__here__))) |
|
42 | __root__ = dn(dn(dn(__here__))) | |
43 | sys.path.append(__root__) |
|
43 | sys.path.append(__root__) | |
44 |
|
44 | |||
45 | from rhodecode.lib import vcs |
|
45 | from rhodecode.lib import vcs | |
46 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
46 | from rhodecode.lib.vcs.exceptions import RepositoryError | |
47 |
|
47 | |||
48 | PASES = 3 |
|
48 | PASES = 3 | |
49 | HOST = 'http://127.0.0.1' |
|
49 | HOST = 'http://127.0.0.1' | |
50 | PORT = 5001 |
|
50 | PORT = 5001 | |
51 | BASE_URI = '%s:%s/' % (HOST, PORT) |
|
51 | BASE_URI = '%s:%s/' % (HOST, PORT) | |
52 |
|
52 | |||
53 | if len(sys.argv) == 2: |
|
53 | if len(sys.argv) == 2: | |
54 | BASE_URI = sys.argv[1] |
|
54 | BASE_URI = sys.argv[1] | |
55 |
|
55 | |||
56 | if not BASE_URI.endswith('/'): |
|
56 | if not BASE_URI.endswith('/'): | |
57 | BASE_URI += '/' |
|
57 | BASE_URI += '/' | |
58 |
|
58 | |||
59 | print 'Crawling @ %s' % BASE_URI |
|
59 | print('Crawling @ %s' % BASE_URI) | |
60 | BASE_URI += '%s' |
|
60 | BASE_URI += '%s' | |
61 | PROJECT_PATH = jn('/', 'home', 'marcink', 'repos') |
|
61 | PROJECT_PATH = jn('/', 'home', 'marcink', 'repos') | |
62 | PROJECTS = [ |
|
62 | PROJECTS = [ | |
63 | #'linux-magx-pbranch', |
|
63 | #'linux-magx-pbranch', | |
64 | 'CPython', |
|
64 | 'CPython', | |
65 | 'rhodecode_tip', |
|
65 | 'rhodecode_tip', | |
66 | ] |
|
66 | ] | |
67 |
|
67 | |||
68 |
|
68 | |||
69 | cj = cookielib.FileCookieJar('/tmp/rc_test_cookie.txt') |
|
69 | cj = cookielib.FileCookieJar('/tmp/rc_test_cookie.txt') | |
70 | o = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj)) |
|
70 | o = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj)) | |
71 | o.addheaders = [ |
|
71 | o.addheaders = [ | |
72 | ('User-agent', 'rhodecode-crawler'), |
|
72 | ('User-agent', 'rhodecode-crawler'), | |
73 | ('Accept-Language', 'en - us, en;q = 0.5') |
|
73 | ('Accept-Language', 'en - us, en;q = 0.5') | |
74 | ] |
|
74 | ] | |
75 |
|
75 | |||
76 | urllib2.install_opener(o) |
|
76 | urllib2.install_opener(o) | |
77 |
|
77 | |||
78 |
|
78 | |||
79 | def _get_repo(proj): |
|
79 | def _get_repo(proj): | |
80 | if isinstance(proj, basestring): |
|
80 | if isinstance(proj, basestring): | |
81 | repo = vcs.get_repo(jn(PROJECT_PATH, proj)) |
|
81 | repo = vcs.get_repo(jn(PROJECT_PATH, proj)) | |
82 | proj = proj |
|
82 | proj = proj | |
83 | else: |
|
83 | else: | |
84 | repo = proj |
|
84 | repo = proj | |
85 | proj = repo.name |
|
85 | proj = repo.name | |
86 |
|
86 | |||
87 | return repo, proj |
|
87 | return repo, proj | |
88 |
|
88 | |||
89 |
|
89 | |||
90 | def test_changelog_walk(proj, pages=100): |
|
90 | def test_changelog_walk(proj, pages=100): | |
91 | repo, proj = _get_repo(proj) |
|
91 | repo, proj = _get_repo(proj) | |
92 |
|
92 | |||
93 | total_time = 0 |
|
93 | total_time = 0 | |
94 | for i in range(1, pages): |
|
94 | for i in range(1, pages): | |
95 |
|
95 | |||
96 | page = '/'.join((proj, 'changelog',)) |
|
96 | page = '/'.join((proj, 'changelog',)) | |
97 |
|
97 | |||
98 | full_uri = (BASE_URI % page) + '?' + urllib.urlencode({'page': i}) |
|
98 | full_uri = (BASE_URI % page) + '?' + urllib.urlencode({'page': i}) | |
99 | s = time.time() |
|
99 | s = time.time() | |
100 | f = o.open(full_uri) |
|
100 | f = o.open(full_uri) | |
101 |
|
101 | |||
102 | assert f.url == full_uri, 'URL:%s does not match %s' % (f.url, full_uri) |
|
102 | assert f.url == full_uri, 'URL:%s does not match %s' % (f.url, full_uri) | |
103 |
|
103 | |||
104 | size = len(f.read()) |
|
104 | size = len(f.read()) | |
105 | e = time.time() - s |
|
105 | e = time.time() - s | |
106 | total_time += e |
|
106 | total_time += e | |
107 | print 'visited %s size:%s req:%s ms' % (full_uri, size, e) |
|
107 | print('visited %s size:%s req:%s ms' % (full_uri, size, e)) | |
108 |
|
108 | |||
109 | print 'total_time {}'.format(total_time) |
|
109 | print('total_time {}'.format(total_time)) | |
110 | print 'average on req {}'.format(total_time / float(pages)) |
|
110 | print('average on req {}'.format(total_time / float(pages))) | |
111 |
|
111 | |||
112 |
|
112 | |||
113 | def test_commit_walk(proj, limit=None): |
|
113 | def test_commit_walk(proj, limit=None): | |
114 | repo, proj = _get_repo(proj) |
|
114 | repo, proj = _get_repo(proj) | |
115 |
|
115 | |||
116 | print 'processing', jn(PROJECT_PATH, proj) |
|
116 | print('processing', jn(PROJECT_PATH, proj)) | |
117 | total_time = 0 |
|
117 | total_time = 0 | |
118 |
|
118 | |||
119 | cnt = 0 |
|
119 | cnt = 0 | |
120 | for i in repo: |
|
120 | for i in repo: | |
121 | cnt += 1 |
|
121 | cnt += 1 | |
122 | raw_cs = '/'.join((proj, 'changeset', i.raw_id)) |
|
122 | raw_cs = '/'.join((proj, 'changeset', i.raw_id)) | |
123 | if limit and limit == cnt: |
|
123 | if limit and limit == cnt: | |
124 | break |
|
124 | break | |
125 |
|
125 | |||
126 | full_uri = (BASE_URI % raw_cs) |
|
126 | full_uri = (BASE_URI % raw_cs) | |
127 | print '%s visiting %s\%s' % (cnt, full_uri, i) |
|
127 | print('%s visiting %s\%s' % (cnt, full_uri, i)) | |
128 | s = time.time() |
|
128 | s = time.time() | |
129 | f = o.open(full_uri) |
|
129 | f = o.open(full_uri) | |
130 | size = len(f.read()) |
|
130 | size = len(f.read()) | |
131 | e = time.time() - s |
|
131 | e = time.time() - s | |
132 | total_time += e |
|
132 | total_time += e | |
133 | print '%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e) |
|
133 | print('%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e)) | |
134 |
|
134 | |||
135 | print 'total_time {}'.format(total_time) |
|
135 | print('total_time {}'.format(total_time)) | |
136 | print 'average on req {}'.format(total_time / float(cnt)) |
|
136 | print('average on req {}'.format(total_time / float(cnt))) | |
137 |
|
137 | |||
138 |
|
138 | |||
139 | def test_files_walk(proj, limit=100): |
|
139 | def test_files_walk(proj, limit=100): | |
140 | repo, proj = _get_repo(proj) |
|
140 | repo, proj = _get_repo(proj) | |
141 |
|
141 | |||
142 | print 'processing {}'.format(jn(PROJECT_PATH, proj)) |
|
142 | print('processing {}'.format(jn(PROJECT_PATH, proj))) | |
143 | total_time = 0 |
|
143 | total_time = 0 | |
144 |
|
144 | |||
145 | paths_ = OrderedSet(['']) |
|
145 | paths_ = OrderedSet(['']) | |
146 | try: |
|
146 | try: | |
147 | tip = repo.get_commit('tip') |
|
147 | tip = repo.get_commit('tip') | |
148 | for topnode, dirs, files in tip.walk('/'): |
|
148 | for topnode, dirs, files in tip.walk('/'): | |
149 |
|
149 | |||
150 | for dir in dirs: |
|
150 | for dir in dirs: | |
151 | paths_.add(dir.path) |
|
151 | paths_.add(dir.path) | |
152 | for f in dir: |
|
152 | for f in dir: | |
153 | paths_.add(f.path) |
|
153 | paths_.add(f.path) | |
154 |
|
154 | |||
155 | for f in files: |
|
155 | for f in files: | |
156 | paths_.add(f.path) |
|
156 | paths_.add(f.path) | |
157 |
|
157 | |||
158 | except RepositoryError as e: |
|
158 | except RepositoryError as e: | |
159 | pass |
|
159 | pass | |
160 |
|
160 | |||
161 | cnt = 0 |
|
161 | cnt = 0 | |
162 | for f in paths_: |
|
162 | for f in paths_: | |
163 | cnt += 1 |
|
163 | cnt += 1 | |
164 | if limit and limit == cnt: |
|
164 | if limit and limit == cnt: | |
165 | break |
|
165 | break | |
166 |
|
166 | |||
167 | file_path = '/'.join((proj, 'files', 'tip', f)) |
|
167 | file_path = '/'.join((proj, 'files', 'tip', f)) | |
168 | full_uri = (BASE_URI % file_path) |
|
168 | full_uri = (BASE_URI % file_path) | |
169 | print '%s visiting %s' % (cnt, full_uri) |
|
169 | print('%s visiting %s' % (cnt, full_uri)) | |
170 | s = time.time() |
|
170 | s = time.time() | |
171 | f = o.open(full_uri) |
|
171 | f = o.open(full_uri) | |
172 | size = len(f.read()) |
|
172 | size = len(f.read()) | |
173 | e = time.time() - s |
|
173 | e = time.time() - s | |
174 | total_time += e |
|
174 | total_time += e | |
175 | print '%s visited OK size:%s req:%s ms' % (cnt, size, e) |
|
175 | print('%s visited OK size:%s req:%s ms' % (cnt, size, e)) | |
176 |
|
176 | |||
177 | print 'total_time {}'.format(total_time) |
|
177 | print('total_time {}'.format(total_time)) | |
178 | print 'average on req {}'.format(total_time / float(cnt)) |
|
178 | print('average on req {}'.format(total_time / float(cnt))) | |
179 |
|
179 | |||
180 | if __name__ == '__main__': |
|
180 | if __name__ == '__main__': | |
181 | for path in PROJECTS: |
|
181 | for path in PROJECTS: | |
182 | repo = vcs.get_repo(jn(PROJECT_PATH, path)) |
|
182 | repo = vcs.get_repo(jn(PROJECT_PATH, path)) | |
183 | for i in range(PASES): |
|
183 | for i in range(PASES): | |
184 | print 'PASS %s/%s' % (i, PASES) |
|
184 | print('PASS %s/%s' % (i, PASES)) | |
185 | test_changelog_walk(repo, pages=80) |
|
185 | test_changelog_walk(repo, pages=80) | |
186 | test_commit_walk(repo, limit=100) |
|
186 | test_commit_walk(repo, limit=100) | |
187 | test_files_walk(repo, limit=100) |
|
187 | test_files_walk(repo, limit=100) |
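
Reviewer note: while the print calls in this crawler are now function-style, the cookielib/urllib2/basestring usages remain Python 2 only. Under Python 3 the same opener setup would be spelled with http.cookiejar and urllib.request; a sketch, not part of this changeset::

    import http.cookiejar
    import urllib.request

    cj = http.cookiejar.FileCookieJar('/tmp/rc_test_cookie.txt')
    o = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
    o.addheaders = [
        ('User-agent', 'rhodecode-crawler'),
        ('Accept-Language', 'en-us, en;q=0.5'),
    ]
    urllib.request.install_opener(o)
    # o.open(...) would then fetch pages from a running instance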