@@ -1,580 +1,580 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import urllib.parse

import mock
import pytest

from rhodecode.tests import (
    assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN,
    no_newline_id_generator)
from rhodecode.tests.fixture import Fixture
from rhodecode.lib.auth import check_password
from rhodecode.lib import helpers as h
from rhodecode.model.auth_token import AuthTokenModel
from rhodecode.model.db import User, Notification, UserApiKeys
from rhodecode.model.meta import Session

fixture = Fixture()

whitelist_view = ['RepoCommitsView:repo_commit_raw']


def route_path(name, params=None, **kwargs):
    import urllib.request, urllib.parse, urllib.error
    from rhodecode.apps._base import ADMIN_PREFIX

    base_url = {
        'login': ADMIN_PREFIX + '/login',
        'logout': ADMIN_PREFIX + '/logout',
        'register': ADMIN_PREFIX + '/register',
        'reset_password':
            ADMIN_PREFIX + '/password_reset',
        'reset_password_confirmation':
            ADMIN_PREFIX + '/password_reset_confirmation',

        'admin_permissions_application':
            ADMIN_PREFIX + '/permissions/application',
        'admin_permissions_application_update':
            ADMIN_PREFIX + '/permissions/application/update',

        'repo_commit_raw': '/{repo_name}/raw-changeset/{commit_id}'

    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
    return base_url


@pytest.mark.usefixtures('app')
class TestLoginController(object):
    destroy_users = set()

    @classmethod
    def teardown_class(cls):
        fixture.destroy_users(cls.destroy_users)

    def teardown_method(self, method):
        for n in Notification.query().all():
            Session().delete(n)

        Session().commit()
        assert Notification.query().all() == []

    def test_index(self):
        response = self.app.get(route_path('login'))
        assert response.status == '200 OK'
        # Test response...

    def test_login_admin_ok(self):
        response = self.app.post(route_path('login'),
                                 {'username': 'test_admin',
                                  'password': 'test12'}, status=302)
        response = response.follow()
        session = response.get_session_from_response()
        username = session['rhodecode_user'].get('username')
        assert username == 'test_admin'
        response.mustcontain('logout')

    def test_login_regular_ok(self):
        response = self.app.post(route_path('login'),
                                 {'username': 'test_regular',
                                  'password': 'test12'}, status=302)

        response = response.follow()
        session = response.get_session_from_response()
        username = session['rhodecode_user'].get('username')
        assert username == 'test_regular'
        response.mustcontain('logout')

    def test_login_regular_forbidden_when_super_admin_restriction(self):
        from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin
        with fixture.auth_restriction(self.app._pyramid_registry,
                                      RhodeCodeAuthPlugin.AUTH_RESTRICTION_SUPER_ADMIN):
            response = self.app.post(route_path('login'),
                                     {'username': 'test_regular',
                                      'password': 'test12'})

            response.mustcontain('invalid user name')
            response.mustcontain('invalid password')

    def test_login_regular_forbidden_when_scope_restriction(self):
        from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin
        with fixture.scope_restriction(self.app._pyramid_registry,
                                       RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_VCS):
            response = self.app.post(route_path('login'),
                                     {'username': 'test_regular',
                                      'password': 'test12'})

            response.mustcontain('invalid user name')
            response.mustcontain('invalid password')

    def test_login_ok_came_from(self):
        test_came_from = '/_admin/users?branch=stable'
        _url = '{}?came_from={}'.format(route_path('login'), test_came_from)
        response = self.app.post(
            _url, {'username': 'test_admin', 'password': 'test12'}, status=302)

        assert 'branch=stable' in response.location
        response = response.follow()

        assert response.status == '200 OK'
        response.mustcontain('Users administration')

    def test_redirect_to_login_with_get_args(self):
        with fixture.anon_access(False):
            kwargs = {'branch': 'stable'}
            response = self.app.get(
                h.route_path('repo_summary', repo_name=HG_REPO, _query=kwargs),
                status=302)

            response_query = urllib.parse.parse_qsl(response.location)
            assert 'branch=stable' in response_query[0][1]

    def test_login_form_with_get_args(self):
        _url = '{}?came_from=/_admin/users,branch=stable'.format(route_path('login'))
        response = self.app.get(_url)
        assert 'branch%3Dstable' in response.form.action

    @pytest.mark.parametrize("url_came_from", [
        'data:text/html,<script>window.alert("xss")</script>',
        'mailto:test@rhodecode.org',
        'file:///etc/passwd',
        'ftp://some.ftp.server',
        'http://other.domain',
        '/\r\nX-Forwarded-Host: http://example.org',
    ], ids=no_newline_id_generator)
    def test_login_bad_came_froms(self, url_came_from):
        _url = '{}?came_from={}'.format(route_path('login'), url_came_from)
        response = self.app.post(
            _url,
            {'username': 'test_admin', 'password': 'test12'})
        assert response.status == '302 Found'
        response = response.follow()
        assert response.status == '200 OK'
        assert response.request.path == '/'

    def test_login_short_password(self):
        response = self.app.post(route_path('login'),
                                 {'username': 'test_admin',
                                  'password': 'as'})
        assert response.status == '200 OK'

        response.mustcontain('Enter 3 characters or more')

    def test_login_wrong_non_ascii_password(self, user_regular):
        response = self.app.post(
            route_path('login'),
            {'username': user_regular.username,
             'password': u'invalid-non-asci\xe4'.encode('utf8')})

        response.mustcontain('invalid user name')
        response.mustcontain('invalid password')

    def test_login_with_non_ascii_password(self, user_util):
        password = u'valid-non-ascii\xe4'
        user = user_util.create_user(password=password)
        response = self.app.post(
            route_path('login'),
            {'username': user.username,
             'password': password})
        assert response.status_code == 302

    def test_login_wrong_username_password(self):
        response = self.app.post(route_path('login'),
                                 {'username': 'error',
                                  'password': 'test12'})

        response.mustcontain('invalid user name')
        response.mustcontain('invalid password')

    def test_login_admin_ok_password_migration(self, real_crypto_backend):
        from rhodecode.lib import auth

        # create new user, with sha256 password
        temp_user = 'test_admin_sha256'
        user = fixture.create_user(temp_user)
        user.password = auth._RhodeCodeCryptoSha256().hash_create(
            b'test123')
        Session().add(user)
        Session().commit()
        self.destroy_users.add(temp_user)
        response = self.app.post(route_path('login'),
                                 {'username': temp_user,
                                  'password': 'test123'}, status=302)

        response = response.follow()
        session = response.get_session_from_response()
        username = session['rhodecode_user'].get('username')
        assert username == temp_user
        response.mustcontain('logout')

        # new password should be bcrypted, after log-in and transfer
        user = User.get_by_username(temp_user)
        assert user.password.startswith('$')

    # REGISTRATIONS
    def test_register(self):
        response = self.app.get(route_path('register'))
        response.mustcontain('Create an Account')

    def test_register_err_same_username(self):
        uname = 'test_admin'
        response = self.app.post(
            route_path('register'),
            {
                'username': uname,
                'password': 'test12',
                'password_confirmation': 'test12',
                'email': 'goodmail@domain.com',
                'firstname': 'test',
                'lastname': 'test'
            }
        )

        assertr = response.assert_response()
        msg = 'Username "%(username)s" already exists'
        msg = msg % {'username': uname}
        assertr.element_contains('#username+.error-message', msg)

    def test_register_err_same_email(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'test_admin_0',
                'password': 'test12',
                'password_confirmation': 'test12',
                'email': 'test_admin@mail.com',
                'firstname': 'test',
                'lastname': 'test'
            }
        )

        assertr = response.assert_response()
        msg = u'This e-mail address is already taken'
        assertr.element_contains('#email+.error-message', msg)

    def test_register_err_same_email_case_sensitive(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'test_admin_1',
                'password': 'test12',
                'password_confirmation': 'test12',
                'email': 'TesT_Admin@mail.COM',
                'firstname': 'test',
                'lastname': 'test'
            }
        )
        assertr = response.assert_response()
        msg = u'This e-mail address is already taken'
        assertr.element_contains('#email+.error-message', msg)

    def test_register_err_wrong_data(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'xs',
                'password': 'test',
                'password_confirmation': 'test',
                'email': 'goodmailm',
                'firstname': 'test',
                'lastname': 'test'
            }
        )
        assert response.status == '200 OK'
        response.mustcontain('An email address must contain a single @')
        response.mustcontain('Enter a value 6 characters long or more')

    def test_register_err_username(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'error user',
                'password': 'test12',
                'password_confirmation': 'test12',
                'email': 'goodmailm',
                'firstname': 'test',
                'lastname': 'test'
            }
        )

        response.mustcontain('An email address must contain a single @')
        response.mustcontain(
            'Username may only contain '
            'alphanumeric characters underscores, '
            'periods or dashes and must begin with '
            'alphanumeric character')

    def test_register_err_case_sensitive(self):
        usr = 'Test_Admin'
        response = self.app.post(
            route_path('register'),
            {
                'username': usr,
                'password': 'test12',
                'password_confirmation': 'test12',
                'email': 'goodmailm',
                'firstname': 'test',
                'lastname': 'test'
            }
        )

        assertr = response.assert_response()
        msg = u'Username "%(username)s" already exists'
        msg = msg % {'username': usr}
        assertr.element_contains('#username+.error-message', msg)

    def test_register_special_chars(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'xxxaxn',
                'password': 'ąćźżąśśśś',
                'password_confirmation': 'ąćźżąśśśś',
                'email': 'goodmailm@test.plx',
                'firstname': 'test',
                'lastname': 'test'
            }
        )

        msg = u'Invalid characters (non-ascii) in password'
        response.mustcontain(msg)

    def test_register_password_mismatch(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'xs',
                'password': '123qwe',
                'password_confirmation': 'qwe123',
                'email': 'goodmailm@test.plxa',
                'firstname': 'test',
                'lastname': 'test'
            }
        )
        msg = u'Passwords do not match'
        response.mustcontain(msg)

    def test_register_ok(self):
        username = 'test_regular4'
        password = 'qweqwe'
        email = 'marcin@test.com'
        name = 'testname'
        lastname = 'testlastname'

        # this initializes a session
        response = self.app.get(route_path('register'))
        response.mustcontain('Create an Account')


        response = self.app.post(
            route_path('register'),
            {
                'username': username,
                'password': password,
                'password_confirmation': password,
                'email': email,
                'firstname': name,
                'lastname': lastname,
                'admin': True
            },
            status=302
        )  # This should be overridden

        assert_session_flash(
            response, 'You have successfully registered with RhodeCode. You can log-in now.')

        ret = Session().query(User).filter(
            User.username == 'test_regular4').one()
        assert ret.username == username
        assert check_password(password, ret.password)
        assert ret.email == email
        assert ret.name == name
        assert ret.lastname == lastname
        assert ret.auth_tokens is not None
        assert not ret.admin

    def test_forgot_password_wrong_mail(self):
        bad_email = 'marcin@wrongmail.org'
        # this initializes a session
        self.app.get(route_path('reset_password'))

        response = self.app.post(
            route_path('reset_password'), {'email': bad_email, }
        )
        assert_session_flash(response,
            'If such email exists, a password reset link was sent to it.')

    def test_forgot_password(self, user_util):
        # this initializes a session
        self.app.get(route_path('reset_password'))

        user = user_util.create_user()
        user_id = user.user_id
        email = user.email

        response = self.app.post(route_path('reset_password'), {'email': email, })

        assert_session_flash(response,
            'If such email exists, a password reset link was sent to it.')

        # BAD KEY
        confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey')
        response = self.app.get(confirm_url, status=302)
        assert response.location.endswith(route_path('reset_password'))
        assert_session_flash(response, 'Given reset token is invalid')

        response.follow()  # cleanup flash

        # GOOD KEY
        key = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == user_id)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_PASSWORD_RESET)\
            .first()

        assert key

        confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), key.api_key)
        response = self.app.get(confirm_url)
        assert response.status == '302 Found'
        assert response.location.endswith(route_path('login'))

        assert_session_flash(
            response,
            'Your password reset was successful, '
            'a new password has been sent to your email')

        response.follow()

    def _get_api_whitelist(self, values=None):
        config = {'api_access_controllers_whitelist': values or []}
        return config

    @pytest.mark.parametrize("test_name, auth_token", [
        ('none', None),
        ('empty_string', ''),
        ('fake_number', '123456'),
        ('proper_auth_token', None)
    ])
    def test_access_not_whitelisted_page_via_auth_token(
            self, test_name, auth_token, user_admin):

        whitelist = self._get_api_whitelist([])
        with mock.patch.dict('rhodecode.CONFIG', whitelist):
            assert [] == whitelist['api_access_controllers_whitelist']
            if test_name == 'proper_auth_token':
                # use builtin if api_key is None
                auth_token = user_admin.api_key

            with fixture.anon_access(False):
                self.app.get(
                    route_path('repo_commit_raw',
                               repo_name=HG_REPO, commit_id='tip',
                               params=dict(api_key=auth_token)),
                    status=302)

    @pytest.mark.parametrize("test_name, auth_token, code", [
        ('none', None, 302),
        ('empty_string', '', 302),
        ('fake_number', '123456', 302),
        ('proper_auth_token', None, 200)
    ])
    def test_access_whitelisted_page_via_auth_token(
            self, test_name, auth_token, code, user_admin):

        whitelist = self._get_api_whitelist(whitelist_view)

        with mock.patch.dict('rhodecode.CONFIG', whitelist):
            assert whitelist_view == whitelist['api_access_controllers_whitelist']

            if test_name == 'proper_auth_token':
                auth_token = user_admin.api_key
                assert auth_token

            with fixture.anon_access(False):
                self.app.get(
                    route_path('repo_commit_raw',
                               repo_name=HG_REPO, commit_id='tip',
                               params=dict(api_key=auth_token)),
                    status=code)

    @pytest.mark.parametrize("test_name, auth_token, code", [
        ('proper_auth_token', None, 200),
        ('wrong_auth_token', '123456', 302),
    ])
    def test_access_whitelisted_page_via_auth_token_bound_to_token(
            self, test_name, auth_token, code, user_admin):

        expected_token = auth_token
        if test_name == 'proper_auth_token':
            auth_token = user_admin.api_key
            expected_token = auth_token
            assert auth_token

        whitelist = self._get_api_whitelist([
            'RepoCommitsView:repo_commit_raw@{}'.format(expected_token)])

        with mock.patch.dict('rhodecode.CONFIG', whitelist):

            with fixture.anon_access(False):
                self.app.get(
                    route_path('repo_commit_raw',
                               repo_name=HG_REPO, commit_id='tip',
                               params=dict(api_key=auth_token)),
                    status=code)

    def test_access_page_via_extra_auth_token(self):
        whitelist = self._get_api_whitelist(whitelist_view)
        with mock.patch.dict('rhodecode.CONFIG', whitelist):
            assert whitelist_view == \
                whitelist['api_access_controllers_whitelist']

            new_auth_token = AuthTokenModel().create(
                TEST_USER_ADMIN_LOGIN, 'test')
            Session().commit()
            with fixture.anon_access(False):
                self.app.get(
                    route_path('repo_commit_raw',
                               repo_name=HG_REPO, commit_id='tip',
                               params=dict(api_key=new_auth_token.api_key)),
                    status=200)

    def test_access_page_via_expired_auth_token(self):
        whitelist = self._get_api_whitelist(whitelist_view)
        with mock.patch.dict('rhodecode.CONFIG', whitelist):
            assert whitelist_view == \
                whitelist['api_access_controllers_whitelist']

            new_auth_token = AuthTokenModel().create(
                TEST_USER_ADMIN_LOGIN, 'test')
            Session().commit()
            # patch the api key and make it expired
            new_auth_token.expires = 0
            Session().add(new_auth_token)
            Session().commit()
            with fixture.anon_access(False):
                self.app.get(
                    route_path('repo_commit_raw',
                               repo_name=HG_REPO, commit_id='tip',
                               params=dict(api_key=new_auth_token.api_key)),
                    status=302)
@@ -1,496 +1,494 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
GIT commit module
"""

import re
import stat
import configparser
from itertools import chain
from io import StringIO

from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode.lib.datelib import utcdate_fromtimestamp
from rhodecode.lib.utils import safe_unicode, safe_str
from rhodecode.lib.utils2 import safe_int
from rhodecode.lib.vcs.conf import settings
from rhodecode.lib.vcs.backends import base
from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
from rhodecode.lib.vcs.nodes import (
    FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
    ChangedFileNodesGenerator, AddedFileNodesGenerator,
    RemovedFileNodesGenerator, LargeFileNode)


class GitCommit(base.BaseCommit):
    """
    Represents state of the repository at single commit id.
    """

    _filter_pre_load = [
        # done through a more complex tree walk on parents
        "affected_files",
        # done through subprocess not remote call
        "children",
        # done through a more complex tree walk on parents
        "status",
        # mercurial specific property not supported here
        "_file_paths",
        # mercurial specific property not supported here
        'obsolete',
        # mercurial specific property not supported here
        'phase',
        # mercurial specific property not supported here
        'hidden'
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        self.repository = repository
        self._remote = repository._remote
        # TODO: johbo: Tweak of raw_id should not be necessary
        self.raw_id = safe_str(raw_id)
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self._stat_modes = {}  # stat info for paths
        self._paths = {}  # path processed with parse_tree
        self.nodes = {}
        self._submodules = None

    def _set_bulk_properties(self, pre_load):

        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)
        for attr, value in result.items():
            if attr in ["author", "message"]:
                if value:
                    value = safe_unicode(value)
            elif attr == "date":
                value = utcdate_fromtimestamp(*value)
            elif attr == "parents":
                value = self._make_commits(value)
            elif attr == "branch":
                value = self._set_branch(value)
            self.__dict__[attr] = value

    @LazyProperty
    def _commit(self):
        return self._remote[self.raw_id]

    @LazyProperty
    def _tree_id(self):
        return self._remote[self._commit['tree']]['id']

    @LazyProperty
    def id(self):
        return self.raw_id

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def message(self):
        return safe_unicode(self._remote.message(self.id))

    @LazyProperty
    def committer(self):
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def author(self):
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def date(self):
        unix_ts, tz = self._remote.date(self.raw_id)
        return utcdate_fromtimestamp(unix_ts, tz)

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        return self.changed, self.added, self.removed

    @LazyProperty
    def tags(self):
        tags = [safe_unicode(name) for name,
                commit_id in self.repository.tags.items()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def commit_branches(self):
        branches = []
        for name, commit_id in self.repository.branches.items():
            if commit_id == self.raw_id:
                branches.append(name)
        return branches

    def _set_branch(self, branches):
        if branches:
            # actually commit can have multiple branches in git
            return safe_unicode(branches[0])

    @LazyProperty
    def branch(self):
        branches = self._remote.branch(self.raw_id)
        return self._set_branch(branches)

    def _get_tree_id_for_path(self, path):
        path = safe_str(path)
        if path in self._paths:
            return self._paths[path]

        tree_id = self._tree_id

        path = path.strip('/')
        if path == '':
            data = [tree_id, "tree"]
            self._paths[''] = data
            return data

        tree_id, tree_type, tree_mode = \
            self._remote.tree_and_type_for_path(self.raw_id, path)
        if tree_id is None:
            raise self.no_node_at_path(path)

        self._paths[path] = [tree_id, tree_type]
        self._stat_modes[path] = tree_mode

        if path not in self._paths:
            raise self.no_node_at_path(path)

        return self._paths[path]

    def _get_kind(self, path):
        tree_id, type_ = self._get_tree_id_for_path(path)
        if type_ == 'blob':
            return NodeKind.FILE
        elif type_ == 'tree':
            return NodeKind.DIR
        elif type_ == 'link':
            return NodeKind.SUBMODULE
        return None

    def _get_filectx(self, path):
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(
                "File does not exist for commit %s at '%s'" % (self.raw_id, path))
        return path

    def _get_file_nodes(self):
        return chain(*(t[2] for t in self.walk()))

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parent_ids = self._remote.parents(self.id)
        return self._make_commits(parent_ids)

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """

        children = self._remote.children(self.raw_id)
        return self._make_commits(children)

    def _make_commits(self, commit_ids):
233 | def commit_maker(_commit_id): |
|
233 | def commit_maker(_commit_id): | |
234 | return self.repository.get_commit(commit_id=_commit_id)
|
234 | return self.repository.get_commit(commit_id=_commit_id) |
235 |
|
235 | |||
236 | return [commit_maker(commit_id) for commit_id in commit_ids] |
|
236 | return [commit_maker(commit_id) for commit_id in commit_ids] | |
237 |
|
237 | |||
238 | def get_file_mode(self, path): |
|
238 | def get_file_mode(self, path): | |
239 | """ |
|
239 | """ | |
240 | Returns stat mode of the file at the given `path`. |
|
240 | Returns stat mode of the file at the given `path`. | |
241 | """ |
|
241 | """ | |
242 | path = safe_str(path) |
|
242 | path = safe_str(path) | |
243 | # ensure path is traversed |
|
243 | # ensure path is traversed | |
244 | self._get_tree_id_for_path(path) |
|
244 | self._get_tree_id_for_path(path) | |
245 | return self._stat_modes[path] |
|
245 | return self._stat_modes[path] | |
246 |
|
246 | |||
247 | def is_link(self, path): |
|
247 | def is_link(self, path): | |
248 | return stat.S_ISLNK(self.get_file_mode(path)) |
|
248 | return stat.S_ISLNK(self.get_file_mode(path)) | |
249 |
|
249 | |||
250 | def is_node_binary(self, path): |
|
250 | def is_node_binary(self, path): | |
251 | tree_id, _ = self._get_tree_id_for_path(path) |
|
251 | tree_id, _ = self._get_tree_id_for_path(path) | |
252 | return self._remote.is_binary(tree_id) |
|
252 | return self._remote.is_binary(tree_id) | |
253 |
|
253 | |||
254 | def get_file_content(self, path): |
|
254 | def get_file_content(self, path): | |
255 | """ |
|
255 | """ | |
256 | Returns content of the file at given `path`. |
|
256 | Returns content of the file at given `path`. | |
257 | """ |
|
257 | """ | |
258 | tree_id, _ = self._get_tree_id_for_path(path) |
|
258 | tree_id, _ = self._get_tree_id_for_path(path) | |
259 | return self._remote.blob_as_pretty_string(tree_id) |
|
259 | return self._remote.blob_as_pretty_string(tree_id) | |
260 |
|
260 | |||
261 | def get_file_content_streamed(self, path): |
|
261 | def get_file_content_streamed(self, path): | |
262 | tree_id, _ = self._get_tree_id_for_path(path) |
|
262 | tree_id, _ = self._get_tree_id_for_path(path) | |
263 | stream_method = getattr(self._remote, 'stream:blob_as_pretty_string') |
|
263 | stream_method = getattr(self._remote, 'stream:blob_as_pretty_string') | |
264 | return stream_method(tree_id) |
|
264 | return stream_method(tree_id) | |
265 |
|
265 | |||
266 | def get_file_size(self, path): |
|
266 | def get_file_size(self, path): | |
267 | """ |
|
267 | """ | |
268 | Returns size of the file at given `path`. |
|
268 | Returns size of the file at given `path`. | |
269 | """ |
|
269 | """ | |
270 | tree_id, _ = self._get_tree_id_for_path(path) |
|
270 | tree_id, _ = self._get_tree_id_for_path(path) | |
271 | return self._remote.blob_raw_length(tree_id) |
|
271 | return self._remote.blob_raw_length(tree_id) | |
272 |
|
272 | |||
273 | def get_path_history(self, path, limit=None, pre_load=None): |
|
273 | def get_path_history(self, path, limit=None, pre_load=None): | |
274 | """ |
|
274 | """ | |
275 | Returns the history of a file as a reversed list of `GitCommit` objects for
|
275 | Returns the history of a file as a reversed list of `GitCommit` objects for |
276 | which the file at the given `path` has been modified.
|
276 | which the file at the given `path` has been modified. |
277 | """ |
|
277 | """ | |
278 |
|
278 | |||
279 | path = self._get_filectx(path) |
|
279 | path = self._get_filectx(path) | |
280 | hist = self._remote.node_history(self.raw_id, path, limit) |
|
280 | hist = self._remote.node_history(self.raw_id, path, limit) | |
281 | return [ |
|
281 | return [ | |
282 | self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
282 | self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) | |
283 | for commit_id in hist] |
|
283 | for commit_id in hist] | |
284 |
|
284 | |||
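The path-history API above returns full commit objects, so it can be driven directly from any loaded commit. A minimal usage sketch, assuming `commit` is an already-loaded GitCommit, that 'setup.py' is just an illustrative path, and that `pre_load` accepts attribute names such as 'author' and 'date':

    # walk the last ten commits that touched 'setup.py' (hypothetical path)
    history = commit.get_path_history('setup.py', limit=10,
                                      pre_load=['author', 'date'])
    for old_commit in history:
        # each entry is a GitCommit in which the file was modified
        print(old_commit.raw_id, old_commit.author, old_commit.date)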
285 | def get_file_annotate(self, path, pre_load=None): |
|
285 | def get_file_annotate(self, path, pre_load=None): | |
286 | """ |
|
286 | """ | |
287 | Returns a generator of four-element tuples with
|
287 | Returns a generator of four-element tuples with |
288 | lineno, commit_id, a commit lazy loader and the line content
|
288 | lineno, commit_id, a commit lazy loader and the line content |
289 | """ |
|
289 | """ | |
290 |
|
290 | |||
291 | result = self._remote.node_annotate(self.raw_id, path) |
|
291 | result = self._remote.node_annotate(self.raw_id, path) | |
292 |
|
292 | |||
293 | for ln_no, commit_id, content in result: |
|
293 | for ln_no, commit_id, content in result: | |
294 | yield ( |
|
294 | yield ( | |
295 | ln_no, commit_id, |
|
295 | ln_no, commit_id, | |
296 | lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load), |
|
296 | lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load), | |
297 | content) |
|
297 | content) | |
298 |
|
298 | |||
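Because `get_file_annotate` yields four-element tuples whose third element is a lazy commit loader, callers only pay for the commits they actually resolve. A hedged consumption sketch, assuming `commit` is an existing GitCommit and the file path is purely illustrative:

    # annotate 'README.rst' (hypothetical path) line by line
    for line_no, commit_id, commit_loader, line in commit.get_file_annotate('README.rst'):
        blame_commit = commit_loader()   # resolves the full commit on demand
        print(line_no, commit_id[:12], line.rstrip())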
299 | def get_nodes(self, path): |
|
299 | def get_nodes(self, path): | |
300 |
|
300 | |||
301 | if self._get_kind(path) != NodeKind.DIR: |
|
301 | if self._get_kind(path) != NodeKind.DIR: | |
302 | raise CommitError( |
|
302 | raise CommitError( | |
303 | "Directory does not exist for commit %s at '%s'" % (self.raw_id, path)) |
|
303 | "Directory does not exist for commit %s at '%s'" % (self.raw_id, path)) | |
304 | path = self._fix_path(path) |
|
304 | path = self._fix_path(path) | |
305 |
|
305 | |||
306 | tree_id, _ = self._get_tree_id_for_path(path) |
|
306 | tree_id, _ = self._get_tree_id_for_path(path) | |
307 |
|
307 | |||
308 | dirnodes = [] |
|
308 | dirnodes = [] | |
309 | filenodes = [] |
|
309 | filenodes = [] | |
310 |
|
310 | |||
311 | # extracted tree ID gives us our files... |
|
311 | # extracted tree ID gives us our files... | |
312 | bytes_path = safe_str(path) # libgit operates on bytes |
|
312 | bytes_path = safe_str(path) # libgit operates on bytes | |
313 | for name, stat_, id_, type_ in self._remote.tree_items(tree_id): |
|
313 | for name, stat_, id_, type_ in self._remote.tree_items(tree_id): | |
314 | if type_ == 'link': |
|
314 | if type_ == 'link': | |
315 | url = self._get_submodule_url('/'.join((bytes_path, name))) |
|
315 | url = self._get_submodule_url('/'.join((bytes_path, name))) | |
316 | dirnodes.append(SubModuleNode( |
|
316 | dirnodes.append(SubModuleNode( | |
317 | name, url=url, commit=id_, alias=self.repository.alias)) |
|
317 | name, url=url, commit=id_, alias=self.repository.alias)) | |
318 | continue |
|
318 | continue | |
319 |
|
319 | |||
320 | if bytes_path != '': |
|
320 | if bytes_path != '': | |
321 | obj_path = '/'.join((bytes_path, name)) |
|
321 | obj_path = '/'.join((bytes_path, name)) | |
322 | else: |
|
322 | else: | |
323 | obj_path = name |
|
323 | obj_path = name | |
324 | if obj_path not in self._stat_modes: |
|
324 | if obj_path not in self._stat_modes: | |
325 | self._stat_modes[obj_path] = stat_ |
|
325 | self._stat_modes[obj_path] = stat_ | |
326 |
|
326 | |||
327 | if type_ == 'tree': |
|
327 | if type_ == 'tree': | |
328 | dirnodes.append(DirNode(obj_path, commit=self)) |
|
328 | dirnodes.append(DirNode(obj_path, commit=self)) | |
329 | elif type_ == 'blob': |
|
329 | elif type_ == 'blob': | |
330 | filenodes.append(FileNode(obj_path, commit=self, mode=stat_)) |
|
330 | filenodes.append(FileNode(obj_path, commit=self, mode=stat_)) | |
331 | else: |
|
331 | else: | |
332 | raise CommitError( |
|
332 | raise CommitError( | |
333 | "Requested object should be Tree or Blob, is %s", type_) |
|
333 | "Requested object should be Tree or Blob, is %s", type_) | |
334 |
|
334 | |||
335 | nodes = dirnodes + filenodes |
|
335 | nodes = dirnodes + filenodes | |
336 | for node in nodes: |
|
336 | for node in nodes: | |
337 | if node.path not in self.nodes: |
|
337 | if node.path not in self.nodes: | |
338 | self.nodes[node.path] = node |
|
338 | self.nodes[node.path] = node | |
339 | nodes.sort() |
|
339 | nodes.sort() | |
340 | return nodes |
|
340 | return nodes | |
341 |
|
341 | |||
342 | def get_node(self, path, pre_load=None): |
|
342 | def get_node(self, path, pre_load=None): | |
343 | if isinstance(path, unicode): |
|
|||
344 | path = path.encode('utf-8') |
|
|||
345 | path = self._fix_path(path) |
|
343 | path = self._fix_path(path) | |
346 | if path not in self.nodes: |
|
344 | if path not in self.nodes: | |
347 | try: |
|
345 | try: | |
348 | tree_id, type_ = self._get_tree_id_for_path(path) |
|
346 | tree_id, type_ = self._get_tree_id_for_path(path) | |
349 | except CommitError: |
|
347 | except CommitError: | |
350 | raise NodeDoesNotExistError( |
|
348 | raise NodeDoesNotExistError( | |
351 | "Cannot find one of parents' directories for a given " |
|
349 | "Cannot find one of parents' directories for a given " | |
352 | "path: %s" % path) |
|
350 | "path: %s" % path) | |
353 |
|
351 | |||
354 | if type_ in ['link', 'commit']: |
|
352 | if type_ in ['link', 'commit']: | |
355 | url = self._get_submodule_url(path) |
|
353 | url = self._get_submodule_url(path) | |
356 | node = SubModuleNode(path, url=url, commit=tree_id, |
|
354 | node = SubModuleNode(path, url=url, commit=tree_id, | |
357 | alias=self.repository.alias) |
|
355 | alias=self.repository.alias) | |
358 | elif type_ == 'tree': |
|
356 | elif type_ == 'tree': | |
359 | if path == '': |
|
357 | if path == '': | |
360 | node = RootNode(commit=self) |
|
358 | node = RootNode(commit=self) | |
361 | else: |
|
359 | else: | |
362 | node = DirNode(path, commit=self) |
|
360 | node = DirNode(path, commit=self) | |
363 | elif type_ == 'blob': |
|
361 | elif type_ == 'blob': | |
364 | node = FileNode(path, commit=self, pre_load=pre_load) |
|
362 | node = FileNode(path, commit=self, pre_load=pre_load) | |
365 | self._stat_modes[path] = node.mode |
|
363 | self._stat_modes[path] = node.mode | |
366 | else: |
|
364 | else: | |
367 | raise self.no_node_at_path(path) |
|
365 | raise self.no_node_at_path(path) | |
368 |
|
366 | |||
369 | # cache node |
|
367 | # cache node | |
370 | self.nodes[path] = node |
|
368 | self.nodes[path] = node | |
371 |
|
369 | |||
372 | return self.nodes[path] |
|
370 | return self.nodes[path] | |
373 |
|
371 | |||
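The node lookup above caches results in `self.nodes` and distinguishes blobs, trees and submodules. A short sketch, assuming `commit` is a GitCommit instance and the paths are purely illustrative:

    for child in commit.get_nodes('docs'):       # DirNode / FileNode / SubModuleNode entries
        print(child.path)
    readme = commit.get_node('README.rst')       # FileNode; raises on a missing path
    print(readme.mode)                           # stat mode, cached in _stat_modes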
374 | def get_largefile_node(self, path): |
|
372 | def get_largefile_node(self, path): | |
375 | tree_id, _ = self._get_tree_id_for_path(path) |
|
373 | tree_id, _ = self._get_tree_id_for_path(path) | |
376 | pointer_spec = self._remote.is_large_file(tree_id) |
|
374 | pointer_spec = self._remote.is_large_file(tree_id) | |
377 |
|
375 | |||
378 | if pointer_spec: |
|
376 | if pointer_spec: | |
379 | # the content of the regular FileNode for that file is the hash of the largefile
|
377 | # the content of the regular FileNode for that file is the hash of the largefile |
380 | file_id = pointer_spec.get('oid_hash') |
|
378 | file_id = pointer_spec.get('oid_hash') | |
381 | if self._remote.in_largefiles_store(file_id): |
|
379 | if self._remote.in_largefiles_store(file_id): | |
382 | lf_path = self._remote.store_path(file_id) |
|
380 | lf_path = self._remote.store_path(file_id) | |
383 | return LargeFileNode(lf_path, commit=self, org_path=path) |
|
381 | return LargeFileNode(lf_path, commit=self, org_path=path) | |
384 |
|
382 | |||
385 | @LazyProperty |
|
383 | @LazyProperty | |
386 | def affected_files(self): |
|
384 | def affected_files(self): | |
387 | """ |
|
385 | """ | |
388 | Gets a fast-accessible list of file changes for the given commit
|
386 | Gets a fast-accessible list of file changes for the given commit |
389 | """ |
|
387 | """ | |
390 | added, modified, deleted = self._changes_cache |
|
388 | added, modified, deleted = self._changes_cache | |
391 | return list(added.union(modified).union(deleted)) |
|
389 | return list(added.union(modified).union(deleted)) | |
392 |
|
390 | |||
393 | @LazyProperty |
|
391 | @LazyProperty | |
394 | def _changes_cache(self): |
|
392 | def _changes_cache(self): | |
395 | added = set() |
|
393 | added = set() | |
396 | modified = set() |
|
394 | modified = set() | |
397 | deleted = set() |
|
395 | deleted = set() | |
398 | _r = self._remote |
|
396 | _r = self._remote | |
399 |
|
397 | |||
400 | parents = self.parents |
|
398 | parents = self.parents | |
401 | if not self.parents: |
|
399 | if not self.parents: | |
402 | parents = [base.EmptyCommit()] |
|
400 | parents = [base.EmptyCommit()] | |
403 | for parent in parents: |
|
401 | for parent in parents: | |
404 | if isinstance(parent, base.EmptyCommit): |
|
402 | if isinstance(parent, base.EmptyCommit): | |
405 | oid = None |
|
403 | oid = None | |
406 | else: |
|
404 | else: | |
407 | oid = parent.raw_id |
|
405 | oid = parent.raw_id | |
408 | changes = _r.tree_changes(oid, self.raw_id) |
|
406 | changes = _r.tree_changes(oid, self.raw_id) | |
409 | for (oldpath, newpath), (_, _), (_, _) in changes: |
|
407 | for (oldpath, newpath), (_, _), (_, _) in changes: | |
410 | if newpath and oldpath: |
|
408 | if newpath and oldpath: | |
411 | modified.add(newpath) |
|
409 | modified.add(newpath) | |
412 | elif newpath and not oldpath: |
|
410 | elif newpath and not oldpath: | |
413 | added.add(newpath) |
|
411 | added.add(newpath) | |
414 | elif not newpath and oldpath: |
|
412 | elif not newpath and oldpath: | |
415 | deleted.add(oldpath) |
|
413 | deleted.add(oldpath) | |
416 | return added, modified, deleted |
|
414 | return added, modified, deleted | |
417 |
|
415 | |||
418 | def _get_paths_for_status(self, status): |
|
416 | def _get_paths_for_status(self, status): | |
419 | """ |
|
417 | """ | |
420 | Returns sorted list of paths for given ``status``. |
|
418 | Returns sorted list of paths for given ``status``. | |
421 |
|
419 | |||
422 | :param status: one of: *added*, *modified* or *deleted* |
|
420 | :param status: one of: *added*, *modified* or *deleted* | |
423 | """ |
|
421 | """ | |
424 | added, modified, deleted = self._changes_cache |
|
422 | added, modified, deleted = self._changes_cache | |
425 | return sorted({ |
|
423 | return sorted({ | |
426 | 'added': list(added), |
|
424 | 'added': list(added), | |
427 | 'modified': list(modified), |
|
425 | 'modified': list(modified), | |
428 | 'deleted': list(deleted)}[status] |
|
426 | 'deleted': list(deleted)}[status] | |
429 | ) |
|
427 | ) | |
430 |
|
428 | |||
431 | @LazyProperty |
|
429 | @LazyProperty | |
432 | def added(self): |
|
430 | def added(self): | |
433 | """ |
|
431 | """ | |
434 | Returns list of added ``FileNode`` objects. |
|
432 | Returns list of added ``FileNode`` objects. | |
435 | """ |
|
433 | """ | |
436 | if not self.parents: |
|
434 | if not self.parents: | |
437 | return list(self._get_file_nodes()) |
|
435 | return list(self._get_file_nodes()) | |
438 | return AddedFileNodesGenerator(self.added_paths, self) |
|
436 | return AddedFileNodesGenerator(self.added_paths, self) | |
439 |
|
437 | |||
440 | @LazyProperty |
|
438 | @LazyProperty | |
441 | def added_paths(self): |
|
439 | def added_paths(self): | |
442 | return [n for n in self._get_paths_for_status('added')] |
|
440 | return [n for n in self._get_paths_for_status('added')] | |
443 |
|
441 | |||
444 | @LazyProperty |
|
442 | @LazyProperty | |
445 | def changed(self): |
|
443 | def changed(self): | |
446 | """ |
|
444 | """ | |
447 | Returns list of modified ``FileNode`` objects. |
|
445 | Returns list of modified ``FileNode`` objects. | |
448 | """ |
|
446 | """ | |
449 | if not self.parents: |
|
447 | if not self.parents: | |
450 | return [] |
|
448 | return [] | |
451 | return ChangedFileNodesGenerator(self.changed_paths, self) |
|
449 | return ChangedFileNodesGenerator(self.changed_paths, self) | |
452 |
|
450 | |||
453 | @LazyProperty |
|
451 | @LazyProperty | |
454 | def changed_paths(self): |
|
452 | def changed_paths(self): | |
455 | return [n for n in self._get_paths_for_status('modified')] |
|
453 | return [n for n in self._get_paths_for_status('modified')] | |
456 |
|
454 | |||
457 | @LazyProperty |
|
455 | @LazyProperty | |
458 | def removed(self): |
|
456 | def removed(self): | |
459 | """ |
|
457 | """ | |
460 | Returns list of removed ``FileNode`` objects. |
|
458 | Returns list of removed ``FileNode`` objects. | |
461 | """ |
|
459 | """ | |
462 | if not self.parents: |
|
460 | if not self.parents: | |
463 | return [] |
|
461 | return [] | |
464 | return RemovedFileNodesGenerator(self.removed_paths, self) |
|
462 | return RemovedFileNodesGenerator(self.removed_paths, self) | |
465 |
|
463 | |||
466 | @LazyProperty |
|
464 | @LazyProperty | |
467 | def removed_paths(self): |
|
465 | def removed_paths(self): | |
468 | return [n for n in self._get_paths_for_status('deleted')] |
|
466 | return [n for n in self._get_paths_for_status('deleted')] | |
469 |
|
467 | |||
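The added/changed/removed properties above are all thin views over `_changes_cache`, so they can be mixed freely. A minimal inspection sketch, assuming `commit` is a GitCommit instance:

    print(commit.added_paths)     # paths introduced by this commit
    print(commit.changed_paths)   # paths modified against the parent(s)
    print(commit.removed_paths)   # paths deleted by this commit
    changed, added, removed = commit.status   # same data as FileNode collections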
470 | def _get_submodule_url(self, submodule_path): |
|
468 | def _get_submodule_url(self, submodule_path): | |
471 | git_modules_path = '.gitmodules' |
|
469 | git_modules_path = '.gitmodules' | |
472 |
|
470 | |||
473 | if self._submodules is None: |
|
471 | if self._submodules is None: | |
474 | self._submodules = {} |
|
472 | self._submodules = {} | |
475 |
|
473 | |||
476 | try: |
|
474 | try: | |
477 | submodules_node = self.get_node(git_modules_path) |
|
475 | submodules_node = self.get_node(git_modules_path) | |
478 | except NodeDoesNotExistError: |
|
476 | except NodeDoesNotExistError: | |
479 | return None |
|
477 | return None | |
480 |
|
478 | |||
481 | # ConfigParser fails if there is whitespace; it also needs iterable,
|
479 | # ConfigParser fails if there is whitespace; it also needs iterable, |
482 | # file-like content
|
480 | # file-like content |
483 | def iter_content(_content): |
|
481 | def iter_content(_content): | |
484 | for line in _content.splitlines(): |
|
482 | for line in _content.splitlines(): | |
485 | yield line |
|
483 | yield line | |
486 |
|
484 | |||
487 | parser = configparser.RawConfigParser() |
|
485 | parser = configparser.RawConfigParser() | |
488 | parser.read_file(iter_content(submodules_node.content)) |
|
486 | parser.read_file(iter_content(submodules_node.content)) | |
489 |
|
487 | |||
490 | for section in parser.sections(): |
|
488 | for section in parser.sections(): | |
491 | path = parser.get(section, 'path') |
|
489 | path = parser.get(section, 'path') | |
492 | url = parser.get(section, 'url') |
|
490 | url = parser.get(section, 'url') | |
493 | if path and url: |
|
491 | if path and url: | |
494 | self._submodules[path.strip('/')] = url |
|
492 | self._submodules[path.strip('/')] = url | |
495 |
|
493 | |||
496 | return self._submodules.get(submodule_path.strip('/')) |
|
494 | return self._submodules.get(submodule_path.strip('/')) |
@@ -1,95 +1,96 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2014-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2014-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | HG inmemory module |
|
22 | HG inmemory module | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | from rhodecode.lib.datelib import date_to_timestamp_plus_offset |
|
25 | from rhodecode.lib.datelib import date_to_timestamp_plus_offset | |
26 | from rhodecode.lib.utils import safe_str |
|
26 | from rhodecode.lib.utils import safe_str | |
27 | from rhodecode.lib.vcs.backends.base import BaseInMemoryCommit |
|
27 | from rhodecode.lib.vcs.backends.base import BaseInMemoryCommit | |
28 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
28 | from rhodecode.lib.vcs.exceptions import RepositoryError | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | class MercurialInMemoryCommit(BaseInMemoryCommit): |
|
31 | class MercurialInMemoryCommit(BaseInMemoryCommit): | |
32 |
|
32 | |||
33 | def commit(self, message, author, parents=None, branch=None, date=None, **kwargs): |
|
33 | def commit(self, message, author, parents=None, branch=None, date=None, **kwargs): | |
34 | """ |
|
34 | """ | |
35 | Performs in-memory commit (doesn't check workdir in any way) and |
|
35 | Performs in-memory commit (doesn't check workdir in any way) and | |
36 | returns newly created `MercurialCommit`. Updates repository's |
|
36 | returns newly created `MercurialCommit`. Updates repository's | |
37 | `commit_ids`. |
|
37 | `commit_ids`. | |
38 |
|
38 | |||
39 | :param message: message of the commit |
|
39 | :param message: message of the commit | |
40 | :param author: full username, i.e. "Joe Doe <joe.doe@example.com>" |
|
40 | :param author: full username, i.e. "Joe Doe <joe.doe@example.com>" | |
41 | :param parents: single parent or sequence of parents from which commit |
|
41 | :param parents: single parent or sequence of parents from which commit | |
42 | would be derived |
|
42 | would be derived | |
43 | :param date: `datetime.datetime` instance. Defaults to |
|
43 | :param date: `datetime.datetime` instance. Defaults to | |
44 | ``datetime.datetime.now()``. |
|
44 | ``datetime.datetime.now()``. | |
45 | :param branch: Optional. Branch name as unicode. Will use the backend's |
|
45 | :param branch: Optional. Branch name as unicode. Will use the backend's | |
46 | default if not given. |
|
46 | default if not given. | |
47 |
|
47 | |||
48 | :raises `RepositoryError`: if any error occurs while committing |
|
48 | :raises `RepositoryError`: if any error occurs while committing | |
49 | """ |
|
49 | """ | |
50 | self.check_integrity(parents) |
|
50 | self.check_integrity(parents) | |
51 |
|
51 | |||
52 | if not isinstance(message,
|
52 | if not isinstance(message, str) or not isinstance(author, str): |
53 | # TODO: johbo: Should be a TypeError
|
53 | # TODO: johbo: Should be a TypeError |
54 | raise RepositoryError(
|
54 | raise RepositoryError( |
55 | 'an <unicode> instance got %r & %r instead' |
|
55 | f'Given message and author needs to be ' | |
56 |
|
|
56 | f'an <str> instance got {type(message)} & {type(author)} instead' | |
|
57 | ) | |||
57 |
|
58 | |||
58 | if branch is None: |
|
59 | if branch is None: | |
59 | branch = self.repository.DEFAULT_BRANCH_NAME |
|
60 | branch = self.repository.DEFAULT_BRANCH_NAME | |
60 | kwargs['branch'] = safe_str(branch) |
|
61 | kwargs['branch'] = safe_str(branch) | |
61 |
|
62 | |||
62 | message = safe_str(message) |
|
63 | message = safe_str(message) | |
63 | author = safe_str(author) |
|
64 | author = safe_str(author) | |
64 |
|
65 | |||
65 | parent_ids = [p.raw_id if p else None for p in self.parents] |
|
66 | parent_ids = [p.raw_id if p else None for p in self.parents] | |
66 |
|
67 | |||
67 | ENCODING = "UTF-8" |
|
68 | ENCODING = "UTF-8" | |
68 |
|
69 | |||
69 | updated = [] |
|
70 | updated = [] | |
70 | for node in self.added + self.changed: |
|
71 | for node in self.added + self.changed: | |
71 | if node.is_binary: |
|
72 | if node.is_binary: | |
72 | content = node.content |
|
73 | content = node.content | |
73 | else: |
|
74 | else: | |
74 | content = node.content.encode(ENCODING) |
|
75 | content = node.content.encode(ENCODING) | |
75 | updated.append({ |
|
76 | updated.append({ | |
76 | 'path': node.path, |
|
77 | 'path': node.path, | |
77 | 'content': content, |
|
78 | 'content': content, | |
78 | 'mode': node.mode, |
|
79 | 'mode': node.mode, | |
79 | }) |
|
80 | }) | |
80 |
|
81 | |||
81 | removed = [node.path for node in self.removed] |
|
82 | removed = [node.path for node in self.removed] | |
82 |
|
83 | |||
83 | date, tz = date_to_timestamp_plus_offset(date) |
|
84 | date, tz = date_to_timestamp_plus_offset(date) | |
84 |
|
85 | |||
85 | commit_id = self.repository._remote.commitctx( |
|
86 | commit_id = self.repository._remote.commitctx( | |
86 | message=message, parents=parent_ids, |
|
87 | message=message, parents=parent_ids, | |
87 | commit_time=date, commit_timezone=tz, user=author, |
|
88 | commit_time=date, commit_timezone=tz, user=author, | |
88 | files=self.get_paths(), extra=kwargs, removed=removed, |
|
89 | files=self.get_paths(), extra=kwargs, removed=removed, | |
89 | updated=updated) |
|
90 | updated=updated) | |
90 | self.repository.append_commit_id(commit_id) |
|
91 | self.repository.append_commit_id(commit_id) | |
91 |
|
92 | |||
92 | self.repository.branches = self.repository._get_branches() |
|
93 | self.repository.branches = self.repository._get_branches() | |
93 | tip = self.repository.get_commit(commit_id) |
|
94 | tip = self.repository.get_commit(commit_id) | |
94 | self.reset() |
|
95 | self.reset() | |
95 | return tip |
|
96 | return tip |
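The in-memory commit API documented above is normally driven from a repository's `in_memory_commit` property. A hedged sketch, assuming `repo` is an already-opened Mercurial repository object, that `FileNode` is importable from `rhodecode.lib.vcs.nodes`, and that the `add()` call comes from the base in-memory commit API (adjust names to your checkout):

    from rhodecode.lib.vcs.nodes import FileNode

    imc = repo.in_memory_commit                           # MercurialInMemoryCommit
    imc.add(FileNode('docs/intro.rst', content='hello'))  # hypothetical file
    tip = imc.commit(
        message='Add intro document',
        author='Joe Doe <joe.doe@example.com>',
        branch='default')
    print(tip.raw_id)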
@@ -1,1013 +1,1013 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2014-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2014-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | HG repository module |
|
22 | HG repository module | |
23 | """ |
|
23 | """ | |
24 | import os |
|
24 | import os | |
25 | import logging |
|
25 | import logging | |
26 | import binascii |
|
26 | import binascii | |
27 | import configparser |
|
27 | import configparser | |
28 | import urllib.request, urllib.parse, urllib.error |
|
28 | import urllib.request, urllib.parse, urllib.error | |
29 |
|
29 | |||
30 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
30 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
31 |
|
31 | |||
32 | from collections import OrderedDict |
|
32 | from collections import OrderedDict | |
33 | from rhodecode.lib.datelib import ( |
|
33 | from rhodecode.lib.datelib import ( | |
34 | date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate) |
|
34 | date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate) | |
35 | from rhodecode.lib.utils import safe_unicode, safe_str |
|
35 | from rhodecode.lib.utils import safe_unicode, safe_str | |
36 | from rhodecode.lib.utils2 import CachedProperty |
|
36 | from rhodecode.lib.utils2 import CachedProperty | |
37 | from rhodecode.lib.vcs import connection, exceptions |
|
37 | from rhodecode.lib.vcs import connection, exceptions | |
38 | from rhodecode.lib.vcs.backends.base import ( |
|
38 | from rhodecode.lib.vcs.backends.base import ( | |
39 | BaseRepository, CollectionGenerator, Config, MergeResponse, |
|
39 | BaseRepository, CollectionGenerator, Config, MergeResponse, | |
40 | MergeFailureReason, Reference, BasePathPermissionChecker) |
|
40 | MergeFailureReason, Reference, BasePathPermissionChecker) | |
41 | from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit |
|
41 | from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit | |
42 | from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff |
|
42 | from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff | |
43 | from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit |
|
43 | from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit | |
44 | from rhodecode.lib.vcs.exceptions import ( |
|
44 | from rhodecode.lib.vcs.exceptions import ( | |
45 | EmptyRepositoryError, RepositoryError, TagAlreadyExistError, |
|
45 | EmptyRepositoryError, RepositoryError, TagAlreadyExistError, | |
46 | TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo) |
|
46 | TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo) | |
47 |
|
47 | |||
48 | hexlify = binascii.hexlify |
|
48 | hexlify = binascii.hexlify | |
49 | nullid = "\0" * 20 |
|
49 | nullid = "\0" * 20 | |
50 |
|
50 | |||
51 | log = logging.getLogger(__name__) |
|
51 | log = logging.getLogger(__name__) | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | class MercurialRepository(BaseRepository): |
|
54 | class MercurialRepository(BaseRepository): | |
55 | """ |
|
55 | """ | |
56 | Mercurial repository backend |
|
56 | Mercurial repository backend | |
57 | """ |
|
57 | """ | |
58 | DEFAULT_BRANCH_NAME = 'default' |
|
58 | DEFAULT_BRANCH_NAME = 'default' | |
59 |
|
59 | |||
60 | def __init__(self, repo_path, config=None, create=False, src_url=None, |
|
60 | def __init__(self, repo_path, config=None, create=False, src_url=None, | |
61 | do_workspace_checkout=False, with_wire=None, bare=False): |
|
61 | do_workspace_checkout=False, with_wire=None, bare=False): | |
62 | """ |
|
62 | """ | |
63 | Raises RepositoryError if repository could not be found at the given
|
63 | Raises RepositoryError if repository could not be found at the given |
64 | ``repo_path``. |
|
64 | ``repo_path``. | |
65 |
|
65 | |||
66 | :param repo_path: local path of the repository |
|
66 | :param repo_path: local path of the repository | |
67 | :param config: config object containing the repo configuration |
|
67 | :param config: config object containing the repo configuration | |
68 | :param create=False: if set to True, would try to create repository if |
|
68 | :param create=False: if set to True, would try to create repository if | |
69 | it does not exist rather than raising exception |
|
69 | it does not exist rather than raising exception | |
70 | :param src_url=None: would try to clone repository from given location |
|
70 | :param src_url=None: would try to clone repository from given location | |
71 | :param do_workspace_checkout=False: sets update of working copy after |
|
71 | :param do_workspace_checkout=False: sets update of working copy after | |
72 | making a clone |
|
72 | making a clone | |
73 | :param bare: not used, compatible with other VCS |
|
73 | :param bare: not used, compatible with other VCS | |
74 | """ |
|
74 | """ | |
75 |
|
75 | |||
76 | self.path = safe_str(os.path.abspath(repo_path)) |
|
76 | self.path = safe_str(os.path.abspath(repo_path)) | |
77 | # mercurial since 4.4.X requires certain configuration to be present |
|
77 | # mercurial since 4.4.X requires certain configuration to be present | |
78 | # because sometimes we init the repos with config we need to meet |
|
78 | # because sometimes we init the repos with config we need to meet | |
79 | # special requirements |
|
79 | # special requirements | |
80 | self.config = config if config else self.get_default_config( |
|
80 | self.config = config if config else self.get_default_config( | |
81 | default=[('extensions', 'largefiles', '1')]) |
|
81 | default=[('extensions', 'largefiles', '1')]) | |
82 | self.with_wire = with_wire or {"cache": False} # default should not use cache |
|
82 | self.with_wire = with_wire or {"cache": False} # default should not use cache | |
83 |
|
83 | |||
84 | self._init_repo(create, src_url, do_workspace_checkout) |
|
84 | self._init_repo(create, src_url, do_workspace_checkout) | |
85 |
|
85 | |||
86 | # caches |
|
86 | # caches | |
87 | self._commit_ids = {} |
|
87 | self._commit_ids = {} | |
88 |
|
88 | |||
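A minimal construction sketch for the repository class above, assuming `MercurialRepository` is re-exported from the `hg` backend package and that the repository path is illustrative:

    from rhodecode.lib.vcs.backends.hg import MercurialRepository

    # open an existing repository; create=True would initialise a new one
    # and src_url would clone from a remote location first
    repo = MercurialRepository('/srv/repos/example-hg')
    print(list(repo.branches))      # active branch names
    print(len(repo.commit_ids))     # number of visible commits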
89 | @LazyProperty |
|
89 | @LazyProperty | |
90 | def _remote(self): |
|
90 | def _remote(self): | |
91 | repo_id = self.path |
|
91 | repo_id = self.path | |
92 | return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire) |
|
92 | return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire) | |
93 |
|
93 | |||
94 | @CachedProperty |
|
94 | @CachedProperty | |
95 | def commit_ids(self): |
|
95 | def commit_ids(self): | |
96 | """ |
|
96 | """ | |
97 | Returns list of commit ids, in ascending order. Being a lazy
|
97 | Returns list of commit ids, in ascending order. Being a lazy |
98 | attribute allows external tools to inject shas from cache. |
|
98 | attribute allows external tools to inject shas from cache. | |
99 | """ |
|
99 | """ | |
100 | commit_ids = self._get_all_commit_ids() |
|
100 | commit_ids = self._get_all_commit_ids() | |
101 | self._rebuild_cache(commit_ids) |
|
101 | self._rebuild_cache(commit_ids) | |
102 | return commit_ids |
|
102 | return commit_ids | |
103 |
|
103 | |||
104 | def _rebuild_cache(self, commit_ids): |
|
104 | def _rebuild_cache(self, commit_ids): | |
105 | self._commit_ids = dict((commit_id, index) |
|
105 | self._commit_ids = dict((commit_id, index) | |
106 | for index, commit_id in enumerate(commit_ids)) |
|
106 | for index, commit_id in enumerate(commit_ids)) | |
107 |
|
107 | |||
108 | @CachedProperty |
|
108 | @CachedProperty | |
109 | def branches(self): |
|
109 | def branches(self): | |
110 | return self._get_branches() |
|
110 | return self._get_branches() | |
111 |
|
111 | |||
112 | @CachedProperty |
|
112 | @CachedProperty | |
113 | def branches_closed(self): |
|
113 | def branches_closed(self): | |
114 | return self._get_branches(active=False, closed=True) |
|
114 | return self._get_branches(active=False, closed=True) | |
115 |
|
115 | |||
116 | @CachedProperty |
|
116 | @CachedProperty | |
117 | def branches_all(self): |
|
117 | def branches_all(self): | |
118 | all_branches = {} |
|
118 | all_branches = {} | |
119 | all_branches.update(self.branches) |
|
119 | all_branches.update(self.branches) | |
120 | all_branches.update(self.branches_closed) |
|
120 | all_branches.update(self.branches_closed) | |
121 | return all_branches |
|
121 | return all_branches | |
122 |
|
122 | |||
123 | def _get_branches(self, active=True, closed=False): |
|
123 | def _get_branches(self, active=True, closed=False): | |
124 | """ |
|
124 | """ | |
125 | Gets branches for this repository |
|
125 | Gets branches for this repository | |
126 | Returns only active (not closed) branches by default
|
126 | Returns only active (not closed) branches by default |
127 |
|
127 | |||
128 | :param active: include active branches
|
128 | :param active: include active branches |
129 | :param closed: include closed branches
|
129 | :param closed: include closed branches |
130 |
|
130 | |||
131 | """ |
|
131 | """ | |
132 | if self.is_empty(): |
|
132 | if self.is_empty(): | |
133 | return {} |
|
133 | return {} | |
134 |
|
134 | |||
135 | def get_name(ctx): |
|
135 | def get_name(ctx): | |
136 | return ctx[0] |
|
136 | return ctx[0] | |
137 |
|
137 | |||
138 | _branches = [(safe_unicode(n), hexlify(h),) for n, h in |
|
138 | _branches = [(safe_unicode(n), hexlify(h),) for n, h in | |
139 | self._remote.branches(active, closed).items()] |
|
139 | self._remote.branches(active, closed).items()] | |
140 |
|
140 | |||
141 | return OrderedDict(sorted(_branches, key=get_name, reverse=False)) |
|
141 | return OrderedDict(sorted(_branches, key=get_name, reverse=False)) | |
142 |
|
142 | |||
143 | @CachedProperty |
|
143 | @CachedProperty | |
144 | def tags(self): |
|
144 | def tags(self): | |
145 | """ |
|
145 | """ | |
146 | Gets tags for this repository |
|
146 | Gets tags for this repository | |
147 | """ |
|
147 | """ | |
148 | return self._get_tags() |
|
148 | return self._get_tags() | |
149 |
|
149 | |||
150 | def _get_tags(self): |
|
150 | def _get_tags(self): | |
151 | if self.is_empty(): |
|
151 | if self.is_empty(): | |
152 | return {} |
|
152 | return {} | |
153 |
|
153 | |||
154 | def get_name(ctx): |
|
154 | def get_name(ctx): | |
155 | return ctx[0] |
|
155 | return ctx[0] | |
156 |
|
156 | |||
157 | _tags = [(safe_unicode(n), hexlify(h),) for n, h in |
|
157 | _tags = [(safe_unicode(n), hexlify(h),) for n, h in | |
158 | self._remote.tags().items()] |
|
158 | self._remote.tags().items()] | |
159 |
|
159 | |||
160 | return OrderedDict(sorted(_tags, key=get_name, reverse=True)) |
|
160 | return OrderedDict(sorted(_tags, key=get_name, reverse=True)) | |
161 |
|
161 | |||
162 | def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs): |
|
162 | def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs): | |
163 | """ |
|
163 | """ | |
164 | Creates and returns a tag for the given ``commit_id``. |
|
164 | Creates and returns a tag for the given ``commit_id``. | |
165 |
|
165 | |||
166 | :param name: name for new tag |
|
166 | :param name: name for new tag | |
167 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
167 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
168 | :param commit_id: commit id for which new tag would be created |
|
168 | :param commit_id: commit id for which new tag would be created | |
169 | :param message: message of the tag's commit |
|
169 | :param message: message of the tag's commit | |
170 | :param date: date of tag's commit |
|
170 | :param date: date of tag's commit | |
171 |
|
171 | |||
172 | :raises TagAlreadyExistError: if tag with same name already exists |
|
172 | :raises TagAlreadyExistError: if tag with same name already exists | |
173 | """ |
|
173 | """ | |
174 | if name in self.tags: |
|
174 | if name in self.tags: | |
175 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
175 | raise TagAlreadyExistError("Tag %s already exists" % name) | |
176 |
|
176 | |||
177 | commit = self.get_commit(commit_id=commit_id) |
|
177 | commit = self.get_commit(commit_id=commit_id) | |
178 | local = kwargs.setdefault('local', False) |
|
178 | local = kwargs.setdefault('local', False) | |
179 |
|
179 | |||
180 | if message is None: |
|
180 | if message is None: | |
181 | message = "Added tag %s for commit %s" % (name, commit.short_id) |
|
181 | message = "Added tag %s for commit %s" % (name, commit.short_id) | |
182 |
|
182 | |||
183 | date, tz = date_to_timestamp_plus_offset(date) |
|
183 | date, tz = date_to_timestamp_plus_offset(date) | |
184 |
|
184 | |||
185 | self._remote.tag(name, commit.raw_id, message, local, user, date, tz) |
|
185 | self._remote.tag(name, commit.raw_id, message, local, user, date, tz) | |
186 | self._remote.invalidate_vcs_cache() |
|
186 | self._remote.invalidate_vcs_cache() | |
187 |
|
187 | |||
188 | # Reinitialize tags |
|
188 | # Reinitialize tags | |
189 | self._invalidate_prop_cache('tags') |
|
189 | self._invalidate_prop_cache('tags') | |
190 | tag_id = self.tags[name] |
|
190 | tag_id = self.tags[name] | |
191 |
|
191 | |||
192 | return self.get_commit(commit_id=tag_id) |
|
192 | return self.get_commit(commit_id=tag_id) | |
193 |
|
193 | |||
194 | def remove_tag(self, name, user, message=None, date=None): |
|
194 | def remove_tag(self, name, user, message=None, date=None): | |
195 | """ |
|
195 | """ | |
196 | Removes tag with the given `name`. |
|
196 | Removes tag with the given `name`. | |
197 |
|
197 | |||
198 | :param name: name of the tag to be removed |
|
198 | :param name: name of the tag to be removed | |
199 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
199 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
200 | :param message: message of the tag's removal commit |
|
200 | :param message: message of the tag's removal commit | |
201 | :param date: date of tag's removal commit |
|
201 | :param date: date of tag's removal commit | |
202 |
|
202 | |||
203 | :raises TagDoesNotExistError: if tag with given name does not exist
|
203 | :raises TagDoesNotExistError: if tag with given name does not exist |
204 | """ |
|
204 | """ | |
205 | if name not in self.tags: |
|
205 | if name not in self.tags: | |
206 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
206 | raise TagDoesNotExistError("Tag %s does not exist" % name) | |
207 |
|
207 | |||
208 | if message is None: |
|
208 | if message is None: | |
209 | message = "Removed tag %s" % name |
|
209 | message = "Removed tag %s" % name | |
210 | local = False |
|
210 | local = False | |
211 |
|
211 | |||
212 | date, tz = date_to_timestamp_plus_offset(date) |
|
212 | date, tz = date_to_timestamp_plus_offset(date) | |
213 |
|
213 | |||
214 | self._remote.tag(name, nullid, message, local, user, date, tz) |
|
214 | self._remote.tag(name, nullid, message, local, user, date, tz) | |
215 | self._remote.invalidate_vcs_cache() |
|
215 | self._remote.invalidate_vcs_cache() | |
216 | self._invalidate_prop_cache('tags') |
|
216 | self._invalidate_prop_cache('tags') | |
217 |
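A short usage sketch for the tag API documented above; the tag name, the user string and the choice of `commit_ids[-1]` as the target commit are assumptions:

    tag_commit = repo.tag(
        name='v1.2.0',
        user='Joe Doe <joe.doe@example.com>',
        commit_id=repo.commit_ids[-1])
    # later, drop it again; raises TagDoesNotExistError when absent
    repo.remove_tag('v1.2.0', user='Joe Doe <joe.doe@example.com>')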
|
217 | |||
218 | @LazyProperty |
|
218 | @LazyProperty | |
219 | def bookmarks(self): |
|
219 | def bookmarks(self): | |
220 | """ |
|
220 | """ | |
221 | Gets bookmarks for this repository |
|
221 | Gets bookmarks for this repository | |
222 | """ |
|
222 | """ | |
223 | return self._get_bookmarks() |
|
223 | return self._get_bookmarks() | |
224 |
|
224 | |||
225 | def _get_bookmarks(self): |
|
225 | def _get_bookmarks(self): | |
226 | if self.is_empty(): |
|
226 | if self.is_empty(): | |
227 | return {} |
|
227 | return {} | |
228 |
|
228 | |||
229 | def get_name(ctx): |
|
229 | def get_name(ctx): | |
230 | return ctx[0] |
|
230 | return ctx[0] | |
231 |
|
231 | |||
232 | _bookmarks = [ |
|
232 | _bookmarks = [ | |
233 | (safe_unicode(n), hexlify(h)) for n, h in |
|
233 | (safe_unicode(n), hexlify(h)) for n, h in | |
234 | self._remote.bookmarks().items()] |
|
234 | self._remote.bookmarks().items()] | |
235 |
|
235 | |||
236 | return OrderedDict(sorted(_bookmarks, key=get_name)) |
|
236 | return OrderedDict(sorted(_bookmarks, key=get_name)) | |
237 |
|
237 | |||
238 | def _get_all_commit_ids(self): |
|
238 | def _get_all_commit_ids(self): | |
239 | return self._remote.get_all_commit_ids('visible') |
|
239 | return self._remote.get_all_commit_ids('visible') | |
240 |
|
240 | |||
241 | def get_diff( |
|
241 | def get_diff( | |
242 | self, commit1, commit2, path='', ignore_whitespace=False, |
|
242 | self, commit1, commit2, path='', ignore_whitespace=False, | |
243 | context=3, path1=None): |
|
243 | context=3, path1=None): | |
244 | """ |
|
244 | """ | |
245 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
245 | Returns (git like) *diff*, as plain text. Shows changes introduced by | |
246 | `commit2` since `commit1`. |
|
246 | `commit2` since `commit1`. | |
247 |
|
247 | |||
248 | :param commit1: Entry point from which diff is shown. Can be |
|
248 | :param commit1: Entry point from which diff is shown. Can be | |
249 | ``self.EMPTY_COMMIT`` - in this case, patch showing all |
|
249 | ``self.EMPTY_COMMIT`` - in this case, patch showing all | |
250 | the changes since empty state of the repository until `commit2` |
|
250 | the changes since empty state of the repository until `commit2` | |
251 | :param commit2: Until which commit changes should be shown. |
|
251 | :param commit2: Until which commit changes should be shown. | |
252 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
252 | :param ignore_whitespace: If set to ``True``, would not show whitespace | |
253 | changes. Defaults to ``False``. |
|
253 | changes. Defaults to ``False``. | |
254 | :param context: How many lines before/after changed lines should be |
|
254 | :param context: How many lines before/after changed lines should be | |
255 | shown. Defaults to ``3``. |
|
255 | shown. Defaults to ``3``. | |
256 | """ |
|
256 | """ | |
257 | self._validate_diff_commits(commit1, commit2) |
|
257 | self._validate_diff_commits(commit1, commit2) | |
258 | if path1 is not None and path1 != path: |
|
258 | if path1 is not None and path1 != path: | |
259 | raise ValueError("Diff of two different paths not supported.") |
|
259 | raise ValueError("Diff of two different paths not supported.") | |
260 |
|
260 | |||
261 | if path: |
|
261 | if path: | |
262 | file_filter = [self.path, path] |
|
262 | file_filter = [self.path, path] | |
263 | else: |
|
263 | else: | |
264 | file_filter = None |
|
264 | file_filter = None | |
265 |
|
265 | |||
266 | diff = self._remote.diff( |
|
266 | diff = self._remote.diff( | |
267 | commit1.raw_id, commit2.raw_id, file_filter=file_filter, |
|
267 | commit1.raw_id, commit2.raw_id, file_filter=file_filter, | |
268 | opt_git=True, opt_ignorews=ignore_whitespace, |
|
268 | opt_git=True, opt_ignorews=ignore_whitespace, | |
269 | context=context) |
|
269 | context=context) | |
270 | return MercurialDiff(diff) |
|
270 | return MercurialDiff(diff) | |
271 |
|
271 | |||
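The diff call above takes two commit objects from the same repository. A hedged sketch, assuming the repository has at least two commits, that the path is illustrative, and that `MercurialDiff` exposes the raw diff text on a `raw` attribute:

    commit1 = repo.get_commit(commit_idx=0)
    commit2 = repo.get_commit(commit_idx=1)
    diff = repo.get_diff(commit1, commit2, path='setup.py',   # hypothetical path
                         ignore_whitespace=True, context=5)
    print(diff.raw)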
272 | def strip(self, commit_id, branch=None): |
|
272 | def strip(self, commit_id, branch=None): | |
273 | self._remote.strip(commit_id, update=False, backup="none") |
|
273 | self._remote.strip(commit_id, update=False, backup="none") | |
274 |
|
274 | |||
275 | self._remote.invalidate_vcs_cache() |
|
275 | self._remote.invalidate_vcs_cache() | |
276 | # clear cache |
|
276 | # clear cache | |
277 | self._invalidate_prop_cache('commit_ids') |
|
277 | self._invalidate_prop_cache('commit_ids') | |
278 |
|
278 | |||
279 | return len(self.commit_ids) |
|
279 | return len(self.commit_ids) | |
280 |
|
280 | |||
281 | def verify(self): |
|
281 | def verify(self): | |
282 | verify = self._remote.verify() |
|
282 | verify = self._remote.verify() | |
283 |
|
283 | |||
284 | self._remote.invalidate_vcs_cache() |
|
284 | self._remote.invalidate_vcs_cache() | |
285 | return verify |
|
285 | return verify | |
286 |
|
286 | |||
287 | def hg_update_cache(self): |
|
287 | def hg_update_cache(self): | |
288 | update_cache = self._remote.hg_update_cache() |
|
288 | update_cache = self._remote.hg_update_cache() | |
289 |
|
289 | |||
290 | self._remote.invalidate_vcs_cache() |
|
290 | self._remote.invalidate_vcs_cache() | |
291 | return update_cache |
|
291 | return update_cache | |
292 |
|
292 | |||
293 | def hg_rebuild_fn_cache(self): |
|
293 | def hg_rebuild_fn_cache(self): | |
294 | update_cache = self._remote.hg_rebuild_fn_cache() |
|
294 | update_cache = self._remote.hg_rebuild_fn_cache() | |
295 |
|
295 | |||
296 | self._remote.invalidate_vcs_cache() |
|
296 | self._remote.invalidate_vcs_cache() | |
297 | return update_cache |
|
297 | return update_cache | |
298 |
|
298 | |||
299 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
299 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): | |
300 | log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s', |
|
300 | log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s', | |
301 | self, commit_id1, repo2, commit_id2) |
|
301 | self, commit_id1, repo2, commit_id2) | |
302 |
|
302 | |||
303 | if commit_id1 == commit_id2: |
|
303 | if commit_id1 == commit_id2: | |
304 | return commit_id1 |
|
304 | return commit_id1 | |
305 |
|
305 | |||
306 | ancestors = self._remote.revs_from_revspec( |
|
306 | ancestors = self._remote.revs_from_revspec( | |
307 | "ancestor(id(%s), id(%s))", commit_id1, commit_id2, |
|
307 | "ancestor(id(%s), id(%s))", commit_id1, commit_id2, | |
308 | other_path=repo2.path) |
|
308 | other_path=repo2.path) | |
309 |
|
309 | |||
310 | ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None |
|
310 | ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None | |
311 |
|
311 | |||
312 | log.debug('Found common ancestor with sha: %s', ancestor_id) |
|
312 | log.debug('Found common ancestor with sha: %s', ancestor_id) | |
313 | return ancestor_id |
|
313 | return ancestor_id | |
314 |
|
314 | |||
315 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
315 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): | |
316 | if commit_id1 == commit_id2: |
|
316 | if commit_id1 == commit_id2: | |
317 | commits = [] |
|
317 | commits = [] | |
318 | else: |
|
318 | else: | |
319 | if merge: |
|
319 | if merge: | |
320 | indexes = self._remote.revs_from_revspec( |
|
320 | indexes = self._remote.revs_from_revspec( | |
321 | "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)", |
|
321 | "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)", | |
322 | commit_id2, commit_id1, commit_id1, other_path=repo2.path) |
|
322 | commit_id2, commit_id1, commit_id1, other_path=repo2.path) | |
323 | else: |
|
323 | else: | |
324 | indexes = self._remote.revs_from_revspec( |
|
324 | indexes = self._remote.revs_from_revspec( | |
325 | "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2, |
|
325 | "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2, | |
326 | commit_id1, other_path=repo2.path) |
|
326 | commit_id1, other_path=repo2.path) | |
327 |
|
327 | |||
328 | commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load) |
|
328 | commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load) | |
329 | for idx in indexes] |
|
329 | for idx in indexes] | |
330 |
|
330 | |||
331 | return commits |
|
331 | return commits | |
332 |
|
332 | |||
333 | @staticmethod |
|
333 | @staticmethod | |
334 | def check_url(url, config): |
|
334 | def check_url(url, config): | |
335 | """ |
|
335 | """ | |
336 | Function will check given url and try to verify if it's a valid |
|
336 | Function will check given url and try to verify if it's a valid | |
337 | link. Sometimes it may happen that mercurial will issue a basic
|
337 | link. Sometimes it may happen that mercurial will issue a basic |
338 | auth request that can cause the whole API to hang when used from python
|
338 | auth request that can cause the whole API to hang when used from python |
339 | or other external calls. |
|
339 | or other external calls. | |
340 |
|
340 | |||
341 | On failure it'll raise urllib2.HTTPError; the exception is also thrown
|
341 | On failure it'll raise urllib2.HTTPError; the exception is also thrown |
342 | when the return code is not 200
|
342 | when the return code is not 200 |
343 | """ |
|
343 | """ | |
344 | # check first if it's not a local url
|
344 | # check first if it's not a local url |
345 | if os.path.isdir(url) or url.startswith('file:'): |
|
345 | if os.path.isdir(url) or url.startswith('file:'): | |
346 | return True |
|
346 | return True | |
347 |
|
347 | |||
348 | # Request the _remote to verify the url |
|
348 | # Request the _remote to verify the url | |
349 | return connection.Hg.check_url(url, config.serialize()) |
|
349 | return connection.Hg.check_url(url, config.serialize()) | |
350 |
|
350 | |||
351 | @staticmethod |
|
351 | @staticmethod | |
352 | def is_valid_repository(path): |
|
352 | def is_valid_repository(path): | |
353 | return os.path.isdir(os.path.join(path, '.hg')) |
|
353 | return os.path.isdir(os.path.join(path, '.hg')) | |
354 |
|
354 | |||
355 | def _init_repo(self, create, src_url=None, do_workspace_checkout=False): |
|
355 | def _init_repo(self, create, src_url=None, do_workspace_checkout=False): | |
356 | """ |
|
356 | """ | |
357 | Function will check for mercurial repository in given path. If there |
|
357 | Function will check for mercurial repository in given path. If there | |
358 | is no repository in that path it will raise an exception unless |
|
358 | is no repository in that path it will raise an exception unless | |
359 | `create` parameter is set to True - in that case repository would |
|
359 | `create` parameter is set to True - in that case repository would | |
360 | be created. |
|
360 | be created. | |
361 |
|
361 | |||
362 | If `src_url` is given, it will try to clone the repository from the |
|
362 | If `src_url` is given, it will try to clone the repository from the | |
363 | location at the given clone_point. Additionally it'll update the |
|
363 | location at the given clone_point. Additionally it'll update the | |
364 | working copy according to the `do_workspace_checkout` flag. |
|
364 | working copy according to the `do_workspace_checkout` flag. | |
365 | """ |
|
365 | """ | |
366 | if create and os.path.exists(self.path): |
|
366 | if create and os.path.exists(self.path): | |
367 | raise RepositoryError( |
|
367 | raise RepositoryError( | |
368 | "Cannot create repository at %s, location already exist" |
|
368 | "Cannot create repository at %s, location already exist" | |
369 | % self.path) |
|
369 | % self.path) | |
370 |
|
370 | |||
371 | if src_url: |
|
371 | if src_url: | |
372 | url = str(self._get_url(src_url)) |
|
372 | url = str(self._get_url(src_url)) | |
373 | MercurialRepository.check_url(url, self.config) |
|
373 | MercurialRepository.check_url(url, self.config) | |
374 |
|
374 | |||
375 | self._remote.clone(url, self.path, do_workspace_checkout) |
|
375 | self._remote.clone(url, self.path, do_workspace_checkout) | |
376 |
|
376 | |||
377 | # Don't try to create if we've already cloned repo |
|
377 | # Don't try to create if we've already cloned repo | |
378 | create = False |
|
378 | create = False | |
379 |
|
379 | |||
380 | if create: |
|
380 | if create: | |
381 | os.makedirs(self.path, mode=0o755) |
|
381 | os.makedirs(self.path, mode=0o755) | |
382 | self._remote.localrepository(create) |
|
382 | self._remote.localrepository(create) | |
383 |
|
383 | |||
384 | @LazyProperty |
|
384 | @LazyProperty | |
385 | def in_memory_commit(self): |
|
385 | def in_memory_commit(self): | |
386 | return MercurialInMemoryCommit(self) |
|
386 | return MercurialInMemoryCommit(self) | |
387 |
|
387 | |||
388 | @LazyProperty |
|
388 | @LazyProperty | |
389 | def description(self): |
|
389 | def description(self): | |
390 | description = self._remote.get_config_value( |
|
390 | description = self._remote.get_config_value( | |
391 | 'web', 'description', untrusted=True) |
|
391 | 'web', 'description', untrusted=True) | |
392 | return safe_unicode(description or self.DEFAULT_DESCRIPTION) |
|
392 | return safe_unicode(description or self.DEFAULT_DESCRIPTION) | |
393 |
|
393 | |||
394 | @LazyProperty |
|
394 | @LazyProperty | |
395 | def contact(self): |
|
395 | def contact(self): | |
396 | contact = ( |
|
396 | contact = ( | |
397 | self._remote.get_config_value("web", "contact") or |
|
397 | self._remote.get_config_value("web", "contact") or | |
398 | self._remote.get_config_value("ui", "username")) |
|
398 | self._remote.get_config_value("ui", "username")) | |
399 | return safe_unicode(contact or self.DEFAULT_CONTACT) |
|
399 | return safe_unicode(contact or self.DEFAULT_CONTACT) | |
400 |
|
400 | |||
401 | @LazyProperty |
|
401 | @LazyProperty | |
402 | def last_change(self): |
|
402 | def last_change(self): | |
403 | """ |
|
403 | """ | |
404 | Returns last change made on this repository as |
|
404 | Returns last change made on this repository as | |
405 | `datetime.datetime` object. |
|
405 | `datetime.datetime` object. | |
406 | """ |
|
406 | """ | |
407 | try: |
|
407 | try: | |
408 | return self.get_commit().date |
|
408 | return self.get_commit().date | |
409 | except RepositoryError: |
|
409 | except RepositoryError: | |
410 | tzoffset = makedate()[1] |
|
410 | tzoffset = makedate()[1] | |
411 | return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset) |
|
411 | return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset) | |
412 |
|
412 | |||
413 | def _get_fs_mtime(self): |
|
413 | def _get_fs_mtime(self): | |
414 | # fallback to filesystem |
|
414 | # fallback to filesystem | |
415 | cl_path = os.path.join(self.path, '.hg', "00changelog.i") |
|
415 | cl_path = os.path.join(self.path, '.hg', "00changelog.i") | |
416 | st_path = os.path.join(self.path, '.hg', "store") |
|
416 | st_path = os.path.join(self.path, '.hg', "store") | |
417 | if os.path.exists(cl_path): |
|
417 | if os.path.exists(cl_path): | |
418 | return os.stat(cl_path).st_mtime |
|
418 | return os.stat(cl_path).st_mtime | |
419 | else: |
|
419 | else: | |
420 | return os.stat(st_path).st_mtime |
|
420 | return os.stat(st_path).st_mtime | |
421 |
|
421 | |||
422 | def _get_url(self, url): |
|
422 | def _get_url(self, url): | |
423 | """ |
|
423 | """ | |
424 | Returns normalized url. If schema is not given, would fall |
|
424 | Returns normalized url. If schema is not given, would fall | |
425 | to filesystem |
|
425 | to filesystem | |
426 | (``file:///``) schema. |
|
426 | (``file:///``) schema. | |
427 | """ |
|
427 | """ | |
428 | url = url.encode('utf8') |
|
428 | url = url.encode('utf8') | |
429 | if url != 'default' and '://' not in url: |
|
429 | if url != 'default' and '://' not in url: | |
430 | url = "file:" + urllib.request.pathname2url(url) |
|
430 | url = "file:" + urllib.request.pathname2url(url) | |
431 | return url |
|
431 | return url | |
432 |
|
432 | |||
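The ``file:`` fallback can be reproduced with the standard library alone; a stand-alone sketch of what `_get_url` does with a bare filesystem path (the paths are examples):

import urllib.request

def to_hg_url(url):
    # Leave 'default' and real URLs alone, turn plain paths into file: URLs.
    if url != 'default' and '://' not in url:
        return 'file:' + urllib.request.pathname2url(url)
    return url

print(to_hg_url('/srv/repos/demo'))        # file:/srv/repos/demo on POSIX
print(to_hg_url('https://example.com/r'))  # returned unchanged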
433 | def get_hook_location(self): |
|
433 | def get_hook_location(self): | |
434 | """ |
|
434 | """ | |
435 | returns absolute path to location where hooks are stored |
|
435 | returns absolute path to location where hooks are stored | |
436 | """ |
|
436 | """ | |
437 | return os.path.join(self.path, '.hg', '.hgrc') |
|
437 | return os.path.join(self.path, '.hg', '.hgrc') | |
438 |
|
438 | |||
439 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, |
|
439 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, | |
440 | translate_tag=None, maybe_unreachable=False, reference_obj=None): |
|
440 | translate_tag=None, maybe_unreachable=False, reference_obj=None): | |
441 | """ |
|
441 | """ | |
442 | Returns ``MercurialCommit`` object representing repository's |
|
442 | Returns ``MercurialCommit`` object representing repository's | |
443 | commit at the given `commit_id` or `commit_idx`. |
|
443 | commit at the given `commit_id` or `commit_idx`. | |
444 | """ |
|
444 | """ | |
445 | if self.is_empty(): |
|
445 | if self.is_empty(): | |
446 | raise EmptyRepositoryError("There are no commits yet") |
|
446 | raise EmptyRepositoryError("There are no commits yet") | |
447 |
|
447 | |||
448 | if commit_id is not None: |
|
448 | if commit_id is not None: | |
449 | self._validate_commit_id(commit_id) |
|
449 | self._validate_commit_id(commit_id) | |
450 | try: |
|
450 | try: | |
451 | # we have cached idx, use it without contacting the remote |
|
451 | # we have cached idx, use it without contacting the remote | |
452 | idx = self._commit_ids[commit_id] |
|
452 | idx = self._commit_ids[commit_id] | |
453 | return MercurialCommit(self, commit_id, idx, pre_load=pre_load) |
|
453 | return MercurialCommit(self, commit_id, idx, pre_load=pre_load) | |
454 | except KeyError: |
|
454 | except KeyError: | |
455 | pass |
|
455 | pass | |
456 |
|
456 | |||
457 | elif commit_idx is not None: |
|
457 | elif commit_idx is not None: | |
458 | self._validate_commit_idx(commit_idx) |
|
458 | self._validate_commit_idx(commit_idx) | |
459 | try: |
|
459 | try: | |
460 | _commit_id = self.commit_ids[commit_idx] |
|
460 | _commit_id = self.commit_ids[commit_idx] | |
461 | if commit_idx < 0: |
|
461 | if commit_idx < 0: | |
462 | commit_idx = self.commit_ids.index(_commit_id) |
|
462 | commit_idx = self.commit_ids.index(_commit_id) | |
463 |
|
463 | |||
464 | return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load) |
|
464 | return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load) | |
465 | except IndexError: |
|
465 | except IndexError: | |
466 | commit_id = commit_idx |
|
466 | commit_id = commit_idx | |
467 | else: |
|
467 | else: | |
468 | commit_id = "tip" |
|
468 | commit_id = "tip" | |
469 |
|
469 | |||
470 | #TODO: decide if we pass bytes or str into lookup ? |
|
470 | #TODO: decide if we pass bytes or str into lookup ? | |
471 | # if isinstance(commit_id, unicode): |
|
471 | # if isinstance(commit_id, unicode): | |
472 | # commit_id = safe_str(commit_id) |
|
472 | # commit_id = safe_str(commit_id) | |
473 |
|
473 | |||
474 | try: |
|
474 | try: | |
475 | raw_id, idx = self._remote.lookup(commit_id, both=True) |
|
475 | raw_id, idx = self._remote.lookup(commit_id, both=True) | |
476 | except CommitDoesNotExistError: |
|
476 | except CommitDoesNotExistError: | |
477 | msg = "Commit {} does not exist for `{}`".format( |
|
477 | msg = "Commit {} does not exist for `{}`".format( | |
478 | *map(safe_str, [commit_id, self.name])) |
|
478 | *map(safe_str, [commit_id, self.name])) | |
479 | raise CommitDoesNotExistError(msg) |
|
479 | raise CommitDoesNotExistError(msg) | |
480 |
|
480 | |||
481 | return MercurialCommit(self, raw_id, idx, pre_load=pre_load) |
|
481 | return MercurialCommit(self, raw_id, idx, pre_load=pre_load) | |
482 |
|
482 | |||
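A hedged usage sketch of the lookups described above, assuming the module's own imports and a non-empty `MercurialRepository` instance `repo`:

tip = repo.get_commit()                      # defaults to "tip"
first = repo.get_commit(commit_idx=0)        # oldest commit by index
last = repo.get_commit(commit_idx=-1)        # negative indexes are accepted
again = repo.get_commit(commit_id=tip.raw_id)

try:
    repo.get_commit(commit_id='0' * 40)      # unknown hash
except CommitDoesNotExistError:
    pass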
483 | def get_commits( |
|
483 | def get_commits( | |
484 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
484 | self, start_id=None, end_id=None, start_date=None, end_date=None, | |
485 | branch_name=None, show_hidden=False, pre_load=None, translate_tags=None): |
|
485 | branch_name=None, show_hidden=False, pre_load=None, translate_tags=None): | |
486 | """ |
|
486 | """ | |
487 | Returns generator of ``MercurialCommit`` objects from start to end |
|
487 | Returns generator of ``MercurialCommit`` objects from start to end | |
488 | (both are inclusive) |
|
488 | (both are inclusive) | |
489 |
|
489 | |||
490 | :param start_id: None, str(commit_id) |
|
490 | :param start_id: None, str(commit_id) | |
491 | :param end_id: None, str(commit_id) |
|
491 | :param end_id: None, str(commit_id) | |
492 | :param start_date: if specified, commits with commit date less than |
|
492 | :param start_date: if specified, commits with commit date less than | |
493 | ``start_date`` would be filtered out from returned set |
|
493 | ``start_date`` would be filtered out from returned set | |
494 | :param end_date: if specified, commits with commit date greater than |
|
494 | :param end_date: if specified, commits with commit date greater than | |
495 | ``end_date`` would be filtered out from returned set |
|
495 | ``end_date`` would be filtered out from returned set | |
496 | :param branch_name: if specified, commits not reachable from given |
|
496 | :param branch_name: if specified, commits not reachable from given | |
497 | branch would be filtered out from returned set |
|
497 | branch would be filtered out from returned set | |
498 | :param show_hidden: Show hidden commits such as obsolete or hidden from |
|
498 | :param show_hidden: Show hidden commits such as obsolete or hidden from | |
499 | Mercurial evolve |
|
499 | Mercurial evolve | |
500 | :raise BranchDoesNotExistError: If given ``branch_name`` does not |
|
500 | :raise BranchDoesNotExistError: If given ``branch_name`` does not | |
501 | exist. |
|
501 | exist. | |
502 | :raise CommitDoesNotExistError: If commit for given ``start`` or |
|
502 | :raise CommitDoesNotExistError: If commit for given ``start`` or | |
503 | ``end`` could not be found. |
|
503 | ``end`` could not be found. | |
504 | """ |
|
504 | """ | |
505 | # actually we should check now if it's not an empty repo |
|
505 | # actually we should check now if it's not an empty repo | |
506 | if self.is_empty(): |
|
506 | if self.is_empty(): | |
507 | raise EmptyRepositoryError("There are no commits yet") |
|
507 | raise EmptyRepositoryError("There are no commits yet") | |
508 | self._validate_branch_name(branch_name) |
|
508 | self._validate_branch_name(branch_name) | |
509 |
|
509 | |||
510 | branch_ancestors = False |
|
510 | branch_ancestors = False | |
511 | if start_id is not None: |
|
511 | if start_id is not None: | |
512 | self._validate_commit_id(start_id) |
|
512 | self._validate_commit_id(start_id) | |
513 | c_start = self.get_commit(commit_id=start_id) |
|
513 | c_start = self.get_commit(commit_id=start_id) | |
514 | start_pos = self._commit_ids[c_start.raw_id] |
|
514 | start_pos = self._commit_ids[c_start.raw_id] | |
515 | else: |
|
515 | else: | |
516 | start_pos = None |
|
516 | start_pos = None | |
517 |
|
517 | |||
518 | if end_id is not None: |
|
518 | if end_id is not None: | |
519 | self._validate_commit_id(end_id) |
|
519 | self._validate_commit_id(end_id) | |
520 | c_end = self.get_commit(commit_id=end_id) |
|
520 | c_end = self.get_commit(commit_id=end_id) | |
521 | end_pos = max(0, self._commit_ids[c_end.raw_id]) |
|
521 | end_pos = max(0, self._commit_ids[c_end.raw_id]) | |
522 | else: |
|
522 | else: | |
523 | end_pos = None |
|
523 | end_pos = None | |
524 |
|
524 | |||
525 | if None not in [start_id, end_id] and start_pos > end_pos: |
|
525 | if None not in [start_id, end_id] and start_pos > end_pos: | |
526 | raise RepositoryError( |
|
526 | raise RepositoryError( | |
527 | "Start commit '%s' cannot be after end commit '%s'" % |
|
527 | "Start commit '%s' cannot be after end commit '%s'" % | |
528 | (start_id, end_id)) |
|
528 | (start_id, end_id)) | |
529 |
|
529 | |||
530 | if end_pos is not None: |
|
530 | if end_pos is not None: | |
531 | end_pos += 1 |
|
531 | end_pos += 1 | |
532 |
|
532 | |||
533 | commit_filter = [] |
|
533 | commit_filter = [] | |
534 |
|
534 | |||
535 | if branch_name and not branch_ancestors: |
|
535 | if branch_name and not branch_ancestors: | |
536 | commit_filter.append('branch("%s")' % (branch_name,)) |
|
536 | commit_filter.append('branch("%s")' % (branch_name,)) | |
537 | elif branch_name and branch_ancestors: |
|
537 | elif branch_name and branch_ancestors: | |
538 | commit_filter.append('ancestors(branch("%s"))' % (branch_name,)) |
|
538 | commit_filter.append('ancestors(branch("%s"))' % (branch_name,)) | |
539 |
|
539 | |||
540 | if start_date and not end_date: |
|
540 | if start_date and not end_date: | |
541 | commit_filter.append('date(">%s")' % (start_date,)) |
|
541 | commit_filter.append('date(">%s")' % (start_date,)) | |
542 | if end_date and not start_date: |
|
542 | if end_date and not start_date: | |
543 | commit_filter.append('date("<%s")' % (end_date,)) |
|
543 | commit_filter.append('date("<%s")' % (end_date,)) | |
544 | if start_date and end_date: |
|
544 | if start_date and end_date: | |
545 | commit_filter.append( |
|
545 | commit_filter.append( | |
546 | 'date(">%s") and date("<%s")' % (start_date, end_date)) |
|
546 | 'date(">%s") and date("<%s")' % (start_date, end_date)) | |
547 |
|
547 | |||
548 | if not show_hidden: |
|
548 | if not show_hidden: | |
549 | commit_filter.append('not obsolete()') |
|
549 | commit_filter.append('not obsolete()') | |
550 | commit_filter.append('not hidden()') |
|
550 | commit_filter.append('not hidden()') | |
551 |
|
551 | |||
552 | # TODO: johbo: Figure out a simpler way for this solution |
|
552 | # TODO: johbo: Figure out a simpler way for this solution | |
553 | collection_generator = CollectionGenerator |
|
553 | collection_generator = CollectionGenerator | |
554 | if commit_filter: |
|
554 | if commit_filter: | |
555 | commit_filter = ' and '.join(map(safe_str, commit_filter)) |
|
555 | commit_filter = ' and '.join(map(safe_str, commit_filter)) | |
556 | revisions = self._remote.rev_range([commit_filter]) |
|
556 | revisions = self._remote.rev_range([commit_filter]) | |
557 | collection_generator = MercurialIndexBasedCollectionGenerator |
|
557 | collection_generator = MercurialIndexBasedCollectionGenerator | |
558 | else: |
|
558 | else: | |
559 | revisions = self.commit_ids |
|
559 | revisions = self.commit_ids | |
560 |
|
560 | |||
561 | if start_pos or end_pos: |
|
561 | if start_pos or end_pos: | |
562 | revisions = revisions[start_pos:end_pos] |
|
562 | revisions = revisions[start_pos:end_pos] | |
563 |
|
563 | |||
564 | return collection_generator(self, revisions, pre_load=pre_load) |
|
564 | return collection_generator(self, revisions, pre_load=pre_load) | |
565 |
|
565 | |||
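To illustrate the filters documented above, a hedged sketch; `repo` is an existing `MercurialRepository` and the branch name and dates are placeholders:

# All commits on one branch, oldest first:
for commit in repo.get_commits(branch_name='default'):
    print(commit.raw_id)

# A date window; internally this becomes the revset
#   date(">2020-01-01") and date("<2020-02-01")
january = repo.get_commits(start_date='2020-01-01', end_date='2020-02-01')

# Include obsolete/hidden changesets created by evolve:
everything = repo.get_commits(show_hidden=True)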
566 | def pull(self, url, commit_ids=None): |
|
566 | def pull(self, url, commit_ids=None): | |
567 | """ |
|
567 | """ | |
568 | Pull changes from external location. |
|
568 | Pull changes from external location. | |
569 |
|
569 | |||
570 | :param commit_ids: Optional. Can be set to a list of commit ids |
|
570 | :param commit_ids: Optional. Can be set to a list of commit ids | |
571 | which shall be pulled from the other repository. |
|
571 | which shall be pulled from the other repository. | |
572 | """ |
|
572 | """ | |
573 | url = self._get_url(url) |
|
573 | url = self._get_url(url) | |
574 | self._remote.pull(url, commit_ids=commit_ids) |
|
574 | self._remote.pull(url, commit_ids=commit_ids) | |
575 | self._remote.invalidate_vcs_cache() |
|
575 | self._remote.invalidate_vcs_cache() | |
576 |
|
576 | |||
577 | def fetch(self, url, commit_ids=None): |
|
577 | def fetch(self, url, commit_ids=None): | |
578 | """ |
|
578 | """ | |
579 | Backward compatibility with GIT fetch==pull |
|
579 | Backward compatibility with GIT fetch==pull | |
580 | """ |
|
580 | """ | |
581 | return self.pull(url, commit_ids=commit_ids) |
|
581 | return self.pull(url, commit_ids=commit_ids) | |
582 |
|
582 | |||
583 | def push(self, url): |
|
583 | def push(self, url): | |
584 | url = self._get_url(url) |
|
584 | url = self._get_url(url) | |
585 | self._remote.sync_push(url) |
|
585 | self._remote.sync_push(url) | |
586 |
|
586 | |||
587 | def _local_clone(self, clone_path): |
|
587 | def _local_clone(self, clone_path): | |
588 | """ |
|
588 | """ | |
589 | Create a local clone of the current repo. |
|
589 | Create a local clone of the current repo. | |
590 | """ |
|
590 | """ | |
591 | self._remote.clone(self.path, clone_path, update_after_clone=True, |
|
591 | self._remote.clone(self.path, clone_path, update_after_clone=True, | |
592 | hooks=False) |
|
592 | hooks=False) | |
593 |
|
593 | |||
594 | def _update(self, revision, clean=False): |
|
594 | def _update(self, revision, clean=False): | |
595 | """ |
|
595 | """ | |
596 | Update the working copy to the specified revision. |
|
596 | Update the working copy to the specified revision. | |
597 | """ |
|
597 | """ | |
598 | log.debug('Doing checkout to commit: `%s` for %s', revision, self) |
|
598 | log.debug('Doing checkout to commit: `%s` for %s', revision, self) | |
599 | self._remote.update(revision, clean=clean) |
|
599 | self._remote.update(revision, clean=clean) | |
600 |
|
600 | |||
601 | def _identify(self): |
|
601 | def _identify(self): | |
602 | """ |
|
602 | """ | |
603 | Return the current state of the working directory. |
|
603 | Return the current state of the working directory. | |
604 | """ |
|
604 | """ | |
605 | return self._remote.identify().strip().rstrip('+') |
|
605 | return self._remote.identify().strip().rstrip('+') | |
606 |
|
606 | |||
607 | def _heads(self, branch=None): |
|
607 | def _heads(self, branch=None): | |
608 | """ |
|
608 | """ | |
609 | Return the commit ids of the repository heads. |
|
609 | Return the commit ids of the repository heads. | |
610 | """ |
|
610 | """ | |
611 | return self._remote.heads(branch=branch).strip().split(' ') |
|
611 | return self._remote.heads(branch=branch).strip().split(' ') | |
612 |
|
612 | |||
613 | def _ancestor(self, revision1, revision2): |
|
613 | def _ancestor(self, revision1, revision2): | |
614 | """ |
|
614 | """ | |
615 | Return the common ancestor of the two revisions. |
|
615 | Return the common ancestor of the two revisions. | |
616 | """ |
|
616 | """ | |
617 | return self._remote.ancestor(revision1, revision2) |
|
617 | return self._remote.ancestor(revision1, revision2) | |
618 |
|
618 | |||
619 | def _local_push( |
|
619 | def _local_push( | |
620 | self, revision, repository_path, push_branches=False, |
|
620 | self, revision, repository_path, push_branches=False, | |
621 | enable_hooks=False): |
|
621 | enable_hooks=False): | |
622 | """ |
|
622 | """ | |
623 | Push the given revision to the specified repository. |
|
623 | Push the given revision to the specified repository. | |
624 |
|
624 | |||
625 | :param push_branches: allow to create branches in the target repo. |
|
625 | :param push_branches: allow to create branches in the target repo. | |
626 | """ |
|
626 | """ | |
627 | self._remote.push( |
|
627 | self._remote.push( | |
628 | [revision], repository_path, hooks=enable_hooks, |
|
628 | [revision], repository_path, hooks=enable_hooks, | |
629 | push_branches=push_branches) |
|
629 | push_branches=push_branches) | |
630 |
|
630 | |||
631 | def _local_merge(self, target_ref, merge_message, user_name, user_email, |
|
631 | def _local_merge(self, target_ref, merge_message, user_name, user_email, | |
632 | source_ref, use_rebase=False, close_commit_id=None, dry_run=False): |
|
632 | source_ref, use_rebase=False, close_commit_id=None, dry_run=False): | |
633 | """ |
|
633 | """ | |
634 | Merge the given source_revision into the checked out revision. |
|
634 | Merge the given source_revision into the checked out revision. | |
635 |
|
635 | |||
636 | Returns the commit id of the merge and a boolean indicating if the |
|
636 | Returns the commit id of the merge and a boolean indicating if the | |
637 | commit needs to be pushed. |
|
637 | commit needs to be pushed. | |
638 | """ |
|
638 | """ | |
639 | source_ref_commit_id = source_ref.commit_id |
|
639 | source_ref_commit_id = source_ref.commit_id | |
640 | target_ref_commit_id = target_ref.commit_id |
|
640 | target_ref_commit_id = target_ref.commit_id | |
641 |
|
641 | |||
642 | # update our workdir to target ref, for proper merge |
|
642 | # update our workdir to target ref, for proper merge | |
643 | self._update(target_ref_commit_id, clean=True) |
|
643 | self._update(target_ref_commit_id, clean=True) | |
644 |
|
644 | |||
645 | ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id) |
|
645 | ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id) | |
646 | is_the_same_branch = self._is_the_same_branch(target_ref, source_ref) |
|
646 | is_the_same_branch = self._is_the_same_branch(target_ref, source_ref) | |
647 |
|
647 | |||
648 | if close_commit_id: |
|
648 | if close_commit_id: | |
649 | # NOTE(marcink): if we get the close commit, this is our new source |
|
649 | # NOTE(marcink): if we get the close commit, this is our new source | |
650 | # which will include the close commit itself. |
|
650 | # which will include the close commit itself. | |
651 | source_ref_commit_id = close_commit_id |
|
651 | source_ref_commit_id = close_commit_id | |
652 |
|
652 | |||
653 | if ancestor == source_ref_commit_id: |
|
653 | if ancestor == source_ref_commit_id: | |
654 | # Nothing to do, the changes were already integrated |
|
654 | # Nothing to do, the changes were already integrated | |
655 | return target_ref_commit_id, False |
|
655 | return target_ref_commit_id, False | |
656 |
|
656 | |||
657 | elif ancestor == target_ref_commit_id and is_the_same_branch: |
|
657 | elif ancestor == target_ref_commit_id and is_the_same_branch: | |
658 | # In this case we should force a commit message |
|
658 | # In this case we should force a commit message | |
659 | return source_ref_commit_id, True |
|
659 | return source_ref_commit_id, True | |
660 |
|
660 | |||
661 | unresolved = None |
|
661 | unresolved = None | |
662 | if use_rebase: |
|
662 | if use_rebase: | |
663 | try: |
|
663 | try: | |
664 | bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id) |
|
664 | bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id) | |
665 | self.bookmark(bookmark_name, revision=source_ref.commit_id) |
|
665 | self.bookmark(bookmark_name, revision=source_ref.commit_id) | |
666 | self._remote.rebase( |
|
666 | self._remote.rebase( | |
667 | source=source_ref_commit_id, dest=target_ref_commit_id) |
|
667 | source=source_ref_commit_id, dest=target_ref_commit_id) | |
668 | self._remote.invalidate_vcs_cache() |
|
668 | self._remote.invalidate_vcs_cache() | |
669 | self._update(bookmark_name, clean=True) |
|
669 | self._update(bookmark_name, clean=True) | |
670 | return self._identify(), True |
|
670 | return self._identify(), True | |
671 | except RepositoryError as e: |
|
671 | except RepositoryError as e: | |
672 | # The rebase-abort may raise another exception which 'hides' |
|
672 | # The rebase-abort may raise another exception which 'hides' | |
673 | # the original one, therefore we log it here. |
|
673 | # the original one, therefore we log it here. | |
674 | log.exception('Error while rebasing shadow repo during merge.') |
|
674 | log.exception('Error while rebasing shadow repo during merge.') | |
675 | if 'unresolved conflicts' in safe_str(e): |
|
675 | if 'unresolved conflicts' in safe_str(e): | |
676 | unresolved = self._remote.get_unresolved_files() |
|
676 | unresolved = self._remote.get_unresolved_files() | |
677 | log.debug('unresolved files: %s', unresolved) |
|
677 | log.debug('unresolved files: %s', unresolved) | |
678 |
|
678 | |||
679 | # Cleanup any rebase leftovers |
|
679 | # Cleanup any rebase leftovers | |
680 | self._remote.invalidate_vcs_cache() |
|
680 | self._remote.invalidate_vcs_cache() | |
681 | self._remote.rebase(abort=True) |
|
681 | self._remote.rebase(abort=True) | |
682 | self._remote.invalidate_vcs_cache() |
|
682 | self._remote.invalidate_vcs_cache() | |
683 | self._remote.update(clean=True) |
|
683 | self._remote.update(clean=True) | |
684 | if unresolved: |
|
684 | if unresolved: | |
685 | raise UnresolvedFilesInRepo(unresolved) |
|
685 | raise UnresolvedFilesInRepo(unresolved) | |
686 | else: |
|
686 | else: | |
687 | raise |
|
687 | raise | |
688 | else: |
|
688 | else: | |
689 | try: |
|
689 | try: | |
690 | self._remote.merge(source_ref_commit_id) |
|
690 | self._remote.merge(source_ref_commit_id) | |
691 | self._remote.invalidate_vcs_cache() |
|
691 | self._remote.invalidate_vcs_cache() | |
692 | self._remote.commit( |
|
692 | self._remote.commit( | |
693 | message=safe_str(merge_message), |
|
693 | message=safe_str(merge_message), | |
694 | username=safe_str('%s <%s>' % (user_name, user_email))) |
|
694 | username=safe_str('%s <%s>' % (user_name, user_email))) | |
695 | self._remote.invalidate_vcs_cache() |
|
695 | self._remote.invalidate_vcs_cache() | |
696 | return self._identify(), True |
|
696 | return self._identify(), True | |
697 | except RepositoryError as e: |
|
697 | except RepositoryError as e: | |
698 | # The merge-abort may raise another exception which 'hides' |
|
698 | # The merge-abort may raise another exception which 'hides' | |
699 | # the original one, therefore we log it here. |
|
699 | # the original one, therefore we log it here. | |
700 | log.exception('Error while merging shadow repo during merge.') |
|
700 | log.exception('Error while merging shadow repo during merge.') | |
701 | if 'unresolved merge conflicts' in safe_str(e): |
|
701 | if 'unresolved merge conflicts' in safe_str(e): | |
702 | unresolved = self._remote.get_unresolved_files() |
|
702 | unresolved = self._remote.get_unresolved_files() | |
703 | log.debug('unresolved files: %s', unresolved) |
|
703 | log.debug('unresolved files: %s', unresolved) | |
704 |
|
704 | |||
705 | # Cleanup any merge leftovers |
|
705 | # Cleanup any merge leftovers | |
706 | self._remote.update(clean=True) |
|
706 | self._remote.update(clean=True) | |
707 | if unresolved: |
|
707 | if unresolved: | |
708 | raise UnresolvedFilesInRepo(unresolved) |
|
708 | raise UnresolvedFilesInRepo(unresolved) | |
709 | else: |
|
709 | else: | |
710 | raise |
|
710 | raise | |
711 |
|
711 | |||
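A hedged sketch of how a caller such as `_merge_repo` (further below) consumes this method, assuming the module's own imports; `shadow_repo`, the refs, the author details and `target_repo_path` are placeholders:

try:
    merge_commit_id, needs_push = shadow_repo._local_merge(
        target_ref, 'Merged via shadow repo', 'Jane Doe', 'jane@example.com',
        source_ref, use_rebase=False)
except UnresolvedFilesInRepo as e:
    # Conflicting paths are carried in the exception arguments.
    print('merge blocked by conflicts:', list(e.args[0]))
else:
    if needs_push:
        # The merge commit only exists in the shadow repo until pushed.
        shadow_repo._local_push(merge_commit_id, target_repo_path)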
712 | def _local_close(self, target_ref, user_name, user_email, |
|
712 | def _local_close(self, target_ref, user_name, user_email, | |
713 | source_ref, close_message=''): |
|
713 | source_ref, close_message=''): | |
714 | """ |
|
714 | """ | |
715 | Close the branch of the given source_revision |
|
715 | Close the branch of the given source_revision | |
716 |
|
716 | |||
717 | Returns the commit id of the close and a boolean indicating if the |
|
717 | Returns the commit id of the close and a boolean indicating if the | |
718 | commit needs to be pushed. |
|
718 | commit needs to be pushed. | |
719 | """ |
|
719 | """ | |
720 | self._update(source_ref.commit_id) |
|
720 | self._update(source_ref.commit_id) | |
721 | message = close_message or "Closing branch: `{}`".format(source_ref.name) |
|
721 | message = close_message or "Closing branch: `{}`".format(source_ref.name) | |
722 | try: |
|
722 | try: | |
723 | self._remote.commit( |
|
723 | self._remote.commit( | |
724 | message=safe_str(message), |
|
724 | message=safe_str(message), | |
725 | username=safe_str('%s <%s>' % (user_name, user_email)), |
|
725 | username=safe_str('%s <%s>' % (user_name, user_email)), | |
726 | close_branch=True) |
|
726 | close_branch=True) | |
727 | self._remote.invalidate_vcs_cache() |
|
727 | self._remote.invalidate_vcs_cache() | |
728 | return self._identify(), True |
|
728 | return self._identify(), True | |
729 | except RepositoryError: |
|
729 | except RepositoryError: | |
730 | # Cleanup any commit leftovers |
|
730 | # Cleanup any commit leftovers | |
731 | self._remote.update(clean=True) |
|
731 | self._remote.update(clean=True) | |
732 | raise |
|
732 | raise | |
733 |
|
733 | |||
734 | def _is_the_same_branch(self, target_ref, source_ref): |
|
734 | def _is_the_same_branch(self, target_ref, source_ref): | |
735 | return ( |
|
735 | return ( | |
736 | self._get_branch_name(target_ref) == |
|
736 | self._get_branch_name(target_ref) == | |
737 | self._get_branch_name(source_ref)) |
|
737 | self._get_branch_name(source_ref)) | |
738 |
|
738 | |||
739 | def _get_branch_name(self, ref): |
|
739 | def _get_branch_name(self, ref): | |
740 | if ref.type == 'branch': |
|
740 | if ref.type == 'branch': | |
741 | return ref.name |
|
741 | return ref.name | |
742 | return self._remote.ctx_branch(ref.commit_id) |
|
742 | return self._remote.ctx_branch(ref.commit_id) | |
743 |
|
743 | |||
744 | def _maybe_prepare_merge_workspace( |
|
744 | def _maybe_prepare_merge_workspace( | |
745 | self, repo_id, workspace_id, unused_target_ref, unused_source_ref): |
|
745 | self, repo_id, workspace_id, unused_target_ref, unused_source_ref): | |
746 | shadow_repository_path = self._get_shadow_repository_path( |
|
746 | shadow_repository_path = self._get_shadow_repository_path( | |
747 | self.path, repo_id, workspace_id) |
|
747 | self.path, repo_id, workspace_id) | |
748 | if not os.path.exists(shadow_repository_path): |
|
748 | if not os.path.exists(shadow_repository_path): | |
749 | self._local_clone(shadow_repository_path) |
|
749 | self._local_clone(shadow_repository_path) | |
750 | log.debug( |
|
750 | log.debug( | |
751 | 'Prepared shadow repository in %s', shadow_repository_path) |
|
751 | 'Prepared shadow repository in %s', shadow_repository_path) | |
752 |
|
752 | |||
753 | return shadow_repository_path |
|
753 | return shadow_repository_path | |
754 |
|
754 | |||
755 | def _merge_repo(self, repo_id, workspace_id, target_ref, |
|
755 | def _merge_repo(self, repo_id, workspace_id, target_ref, | |
756 | source_repo, source_ref, merge_message, |
|
756 | source_repo, source_ref, merge_message, | |
757 | merger_name, merger_email, dry_run=False, |
|
757 | merger_name, merger_email, dry_run=False, | |
758 | use_rebase=False, close_branch=False): |
|
758 | use_rebase=False, close_branch=False): | |
759 |
|
759 | |||
760 | log.debug('Executing merge_repo with %s strategy, dry_run mode:%s', |
|
760 | log.debug('Executing merge_repo with %s strategy, dry_run mode:%s', | |
761 | 'rebase' if use_rebase else 'merge', dry_run) |
|
761 | 'rebase' if use_rebase else 'merge', dry_run) | |
762 | if target_ref.commit_id not in self._heads(): |
|
762 | if target_ref.commit_id not in self._heads(): | |
763 | return MergeResponse( |
|
763 | return MergeResponse( | |
764 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD, |
|
764 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD, | |
765 | metadata={'target_ref': target_ref}) |
|
765 | metadata={'target_ref': target_ref}) | |
766 |
|
766 | |||
767 | try: |
|
767 | try: | |
768 | if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1: |
|
768 | if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1: | |
769 | heads_all = self._heads(target_ref.name) |
|
769 | heads_all = self._heads(target_ref.name) | |
770 | max_heads = 10 |
|
770 | max_heads = 10 | |
771 | if len(heads_all) > max_heads: |
|
771 | if len(heads_all) > max_heads: | |
772 | heads = '\n,'.join( |
|
772 | heads = '\n,'.join( | |
773 | heads_all[:max_heads] + |
|
773 | heads_all[:max_heads] + | |
774 | ['and {} more.'.format(len(heads_all)-max_heads)]) |
|
774 | ['and {} more.'.format(len(heads_all)-max_heads)]) | |
775 | else: |
|
775 | else: | |
776 | heads = '\n,'.join(heads_all) |
|
776 | heads = '\n,'.join(heads_all) | |
777 | metadata = { |
|
777 | metadata = { | |
778 | 'target_ref': target_ref, |
|
778 | 'target_ref': target_ref, | |
779 | 'source_ref': source_ref, |
|
779 | 'source_ref': source_ref, | |
780 | 'heads': heads |
|
780 | 'heads': heads | |
781 | } |
|
781 | } | |
782 | return MergeResponse( |
|
782 | return MergeResponse( | |
783 | False, False, None, |
|
783 | False, False, None, | |
784 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, |
|
784 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, | |
785 | metadata=metadata) |
|
785 | metadata=metadata) | |
786 | except CommitDoesNotExistError: |
|
786 | except CommitDoesNotExistError: | |
787 | log.exception('Failure when looking up branch heads on hg target') |
|
787 | log.exception('Failure when looking up branch heads on hg target') | |
788 | return MergeResponse( |
|
788 | return MergeResponse( | |
789 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, |
|
789 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, | |
790 | metadata={'target_ref': target_ref}) |
|
790 | metadata={'target_ref': target_ref}) | |
791 |
|
791 | |||
792 | shadow_repository_path = self._maybe_prepare_merge_workspace( |
|
792 | shadow_repository_path = self._maybe_prepare_merge_workspace( | |
793 | repo_id, workspace_id, target_ref, source_ref) |
|
793 | repo_id, workspace_id, target_ref, source_ref) | |
794 | shadow_repo = self.get_shadow_instance(shadow_repository_path) |
|
794 | shadow_repo = self.get_shadow_instance(shadow_repository_path) | |
795 |
|
795 | |||
796 | log.debug('Pulling in target reference %s', target_ref) |
|
796 | log.debug('Pulling in target reference %s', target_ref) | |
797 | self._validate_pull_reference(target_ref) |
|
797 | self._validate_pull_reference(target_ref) | |
798 | shadow_repo._local_pull(self.path, target_ref) |
|
798 | shadow_repo._local_pull(self.path, target_ref) | |
799 |
|
799 | |||
800 | try: |
|
800 | try: | |
801 | log.debug('Pulling in source reference %s', source_ref) |
|
801 | log.debug('Pulling in source reference %s', source_ref) | |
802 | source_repo._validate_pull_reference(source_ref) |
|
802 | source_repo._validate_pull_reference(source_ref) | |
803 | shadow_repo._local_pull(source_repo.path, source_ref) |
|
803 | shadow_repo._local_pull(source_repo.path, source_ref) | |
804 | except CommitDoesNotExistError: |
|
804 | except CommitDoesNotExistError: | |
805 | log.exception('Failure when doing local pull on hg shadow repo') |
|
805 | log.exception('Failure when doing local pull on hg shadow repo') | |
806 | return MergeResponse( |
|
806 | return MergeResponse( | |
807 | False, False, None, MergeFailureReason.MISSING_SOURCE_REF, |
|
807 | False, False, None, MergeFailureReason.MISSING_SOURCE_REF, | |
808 | metadata={'source_ref': source_ref}) |
|
808 | metadata={'source_ref': source_ref}) | |
809 |
|
809 | |||
810 | merge_ref = None |
|
810 | merge_ref = None | |
811 | merge_commit_id = None |
|
811 | merge_commit_id = None | |
812 | close_commit_id = None |
|
812 | close_commit_id = None | |
813 | merge_failure_reason = MergeFailureReason.NONE |
|
813 | merge_failure_reason = MergeFailureReason.NONE | |
814 | metadata = {} |
|
814 | metadata = {} | |
815 |
|
815 | |||
816 | # enforce that close branch should be used only in case we source from |
|
816 | # enforce that close branch should be used only in case we source from | |
817 | # an actual Branch |
|
817 | # an actual Branch | |
818 | close_branch = close_branch and source_ref.type == 'branch' |
|
818 | close_branch = close_branch and source_ref.type == 'branch' | |
819 |
|
819 | |||
820 | # don't allow to close branch if source and target are the same |
|
820 | # don't allow to close branch if source and target are the same | |
821 | close_branch = close_branch and source_ref.name != target_ref.name |
|
821 | close_branch = close_branch and source_ref.name != target_ref.name | |
822 |
|
822 | |||
823 | needs_push_on_close = False |
|
823 | needs_push_on_close = False | |
824 | if close_branch and not use_rebase and not dry_run: |
|
824 | if close_branch and not use_rebase and not dry_run: | |
825 | try: |
|
825 | try: | |
826 | close_commit_id, needs_push_on_close = shadow_repo._local_close( |
|
826 | close_commit_id, needs_push_on_close = shadow_repo._local_close( | |
827 | target_ref, merger_name, merger_email, source_ref) |
|
827 | target_ref, merger_name, merger_email, source_ref) | |
828 | merge_possible = True |
|
828 | merge_possible = True | |
829 | except RepositoryError: |
|
829 | except RepositoryError: | |
830 | log.exception('Failure when doing close branch on ' |
|
830 | log.exception('Failure when doing close branch on ' | |
831 | 'shadow repo: %s', shadow_repo) |
|
831 | 'shadow repo: %s', shadow_repo) | |
832 | merge_possible = False |
|
832 | merge_possible = False | |
833 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
833 | merge_failure_reason = MergeFailureReason.MERGE_FAILED | |
834 | else: |
|
834 | else: | |
835 | merge_possible = True |
|
835 | merge_possible = True | |
836 |
|
836 | |||
837 | needs_push = False |
|
837 | needs_push = False | |
838 | if merge_possible: |
|
838 | if merge_possible: | |
839 |
|
839 | |||
840 | try: |
|
840 | try: | |
841 | merge_commit_id, needs_push = shadow_repo._local_merge( |
|
841 | merge_commit_id, needs_push = shadow_repo._local_merge( | |
842 | target_ref, merge_message, merger_name, merger_email, |
|
842 | target_ref, merge_message, merger_name, merger_email, | |
843 | source_ref, use_rebase=use_rebase, |
|
843 | source_ref, use_rebase=use_rebase, | |
844 | close_commit_id=close_commit_id, dry_run=dry_run) |
|
844 | close_commit_id=close_commit_id, dry_run=dry_run) | |
845 | merge_possible = True |
|
845 | merge_possible = True | |
846 |
|
846 | |||
847 | # read the state of the close action, if it |
|
847 | # read the state of the close action, if it | |
848 | # may have required a push |
|
848 | # may have required a push | |
849 | needs_push = needs_push or needs_push_on_close |
|
849 | needs_push = needs_push or needs_push_on_close | |
850 |
|
850 | |||
851 | # Set a bookmark pointing to the merge commit. This bookmark |
|
851 | # Set a bookmark pointing to the merge commit. This bookmark | |
852 | # may be used to easily identify the last successful merge |
|
852 | # may be used to easily identify the last successful merge | |
853 | # commit in the shadow repository. |
|
853 | # commit in the shadow repository. | |
854 | shadow_repo.bookmark('pr-merge', revision=merge_commit_id) |
|
854 | shadow_repo.bookmark('pr-merge', revision=merge_commit_id) | |
855 | merge_ref = Reference('book', 'pr-merge', merge_commit_id) |
|
855 | merge_ref = Reference('book', 'pr-merge', merge_commit_id) | |
856 | except SubrepoMergeError: |
|
856 | except SubrepoMergeError: | |
857 | log.exception( |
|
857 | log.exception( | |
858 | 'Subrepo merge error during local merge on hg shadow repo.') |
|
858 | 'Subrepo merge error during local merge on hg shadow repo.') | |
859 | merge_possible = False |
|
859 | merge_possible = False | |
860 | merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED |
|
860 | merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED | |
861 | needs_push = False |
|
861 | needs_push = False | |
862 | except RepositoryError as e: |
|
862 | except RepositoryError as e: | |
863 | log.exception('Failure when doing local merge on hg shadow repo') |
|
863 | log.exception('Failure when doing local merge on hg shadow repo') | |
864 | if isinstance(e, UnresolvedFilesInRepo): |
|
864 | if isinstance(e, UnresolvedFilesInRepo): | |
865 | all_conflicts = list(e.args[0]) |
|
865 | all_conflicts = list(e.args[0]) | |
866 | max_conflicts = 20 |
|
866 | max_conflicts = 20 | |
867 | if len(all_conflicts) > max_conflicts: |
|
867 | if len(all_conflicts) > max_conflicts: | |
868 | conflicts = all_conflicts[:max_conflicts] \ |
|
868 | conflicts = all_conflicts[:max_conflicts] \ | |
869 | + ['and {} more.'.format(len(all_conflicts)-max_conflicts)] |
|
869 | + ['and {} more.'.format(len(all_conflicts)-max_conflicts)] | |
870 | else: |
|
870 | else: | |
871 | conflicts = all_conflicts |
|
871 | conflicts = all_conflicts | |
872 | metadata['unresolved_files'] = \ |
|
872 | metadata['unresolved_files'] = \ | |
873 | '\n* conflict: ' + \ |
|
873 | '\n* conflict: ' + \ | |
874 | ('\n * conflict: '.join(conflicts)) |
|
874 | ('\n * conflict: '.join(conflicts)) | |
875 |
|
875 | |||
876 | merge_possible = False |
|
876 | merge_possible = False | |
877 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
877 | merge_failure_reason = MergeFailureReason.MERGE_FAILED | |
878 | needs_push = False |
|
878 | needs_push = False | |
879 |
|
879 | |||
880 | if merge_possible and not dry_run: |
|
880 | if merge_possible and not dry_run: | |
881 | if needs_push: |
|
881 | if needs_push: | |
882 | # In case the target is a bookmark, update it, so after pushing |
|
882 | # In case the target is a bookmark, update it, so after pushing | |
883 | # the bookmark is also updated in the target. |
|
883 | # the bookmark is also updated in the target. | |
884 | if target_ref.type == 'book': |
|
884 | if target_ref.type == 'book': | |
885 | shadow_repo.bookmark( |
|
885 | shadow_repo.bookmark( | |
886 | target_ref.name, revision=merge_commit_id) |
|
886 | target_ref.name, revision=merge_commit_id) | |
887 | try: |
|
887 | try: | |
888 | shadow_repo_with_hooks = self.get_shadow_instance( |
|
888 | shadow_repo_with_hooks = self.get_shadow_instance( | |
889 | shadow_repository_path, |
|
889 | shadow_repository_path, | |
890 | enable_hooks=True) |
|
890 | enable_hooks=True) | |
891 | # This is the actual merge action, we push from shadow |
|
891 | # This is the actual merge action, we push from shadow | |
892 | # into origin. |
|
892 | # into origin. | |
893 | # Note: the push_branches option will push any new branch |
|
893 | # Note: the push_branches option will push any new branch | |
894 | # defined in the source repository to the target. This may |
|
894 | # defined in the source repository to the target. This may | |
895 | # be dangerous as branches are permanent in Mercurial. |
|
895 | # be dangerous as branches are permanent in Mercurial. | |
896 | # This feature was requested in issue #441. |
|
896 | # This feature was requested in issue #441. | |
897 | shadow_repo_with_hooks._local_push( |
|
897 | shadow_repo_with_hooks._local_push( | |
898 | merge_commit_id, self.path, push_branches=True, |
|
898 | merge_commit_id, self.path, push_branches=True, | |
899 | enable_hooks=True) |
|
899 | enable_hooks=True) | |
900 |
|
900 | |||
901 | # maybe we also need to push the close_commit_id |
|
901 | # maybe we also need to push the close_commit_id | |
902 | if close_commit_id: |
|
902 | if close_commit_id: | |
903 | shadow_repo_with_hooks._local_push( |
|
903 | shadow_repo_with_hooks._local_push( | |
904 | close_commit_id, self.path, push_branches=True, |
|
904 | close_commit_id, self.path, push_branches=True, | |
905 | enable_hooks=True) |
|
905 | enable_hooks=True) | |
906 | merge_succeeded = True |
|
906 | merge_succeeded = True | |
907 | except RepositoryError: |
|
907 | except RepositoryError: | |
908 | log.exception( |
|
908 | log.exception( | |
909 | 'Failure when doing local push from the shadow ' |
|
909 | 'Failure when doing local push from the shadow ' | |
910 | 'repository to the target repository at %s.', self.path) |
|
910 | 'repository to the target repository at %s.', self.path) | |
911 | merge_succeeded = False |
|
911 | merge_succeeded = False | |
912 | merge_failure_reason = MergeFailureReason.PUSH_FAILED |
|
912 | merge_failure_reason = MergeFailureReason.PUSH_FAILED | |
913 | metadata['target'] = 'hg shadow repo' |
|
913 | metadata['target'] = 'hg shadow repo' | |
914 | metadata['merge_commit'] = merge_commit_id |
|
914 | metadata['merge_commit'] = merge_commit_id | |
915 | else: |
|
915 | else: | |
916 | merge_succeeded = True |
|
916 | merge_succeeded = True | |
917 | else: |
|
917 | else: | |
918 | merge_succeeded = False |
|
918 | merge_succeeded = False | |
919 |
|
919 | |||
920 | return MergeResponse( |
|
920 | return MergeResponse( | |
921 | merge_possible, merge_succeeded, merge_ref, merge_failure_reason, |
|
921 | merge_possible, merge_succeeded, merge_ref, merge_failure_reason, | |
922 | metadata=metadata) |
|
922 | metadata=metadata) | |
923 |
|
923 | |||
924 | def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False): |
|
924 | def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False): | |
925 | config = self.config.copy() |
|
925 | config = self.config.copy() | |
926 | if not enable_hooks: |
|
926 | if not enable_hooks: | |
927 | config.clear_section('hooks') |
|
927 | config.clear_section('hooks') | |
928 | return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache}) |
|
928 | return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache}) | |
929 |
|
929 | |||
930 | def _validate_pull_reference(self, reference): |
|
930 | def _validate_pull_reference(self, reference): | |
931 | if not (reference.name in self.bookmarks or |
|
931 | if not (reference.name in self.bookmarks or | |
932 | reference.name in self.branches or |
|
932 | reference.name in self.branches or | |
933 | self.get_commit(reference.commit_id)): |
|
933 | self.get_commit(reference.commit_id)): | |
934 | raise CommitDoesNotExistError( |
|
934 | raise CommitDoesNotExistError( | |
935 | 'Unknown branch, bookmark or commit id') |
|
935 | 'Unknown branch, bookmark or commit id') | |
936 |
|
936 | |||
937 | def _local_pull(self, repository_path, reference): |
|
937 | def _local_pull(self, repository_path, reference): | |
938 | """ |
|
938 | """ | |
939 | Fetch a branch, bookmark or commit from a local repository. |
|
939 | Fetch a branch, bookmark or commit from a local repository. | |
940 | """ |
|
940 | """ | |
941 | repository_path = os.path.abspath(repository_path) |
|
941 | repository_path = os.path.abspath(repository_path) | |
942 | if repository_path == self.path: |
|
942 | if repository_path == self.path: | |
943 | raise ValueError('Cannot pull from the same repository') |
|
943 | raise ValueError('Cannot pull from the same repository') | |
944 |
|
944 | |||
945 | reference_type_to_option_name = { |
|
945 | reference_type_to_option_name = { | |
946 | 'book': 'bookmark', |
|
946 | 'book': 'bookmark', | |
947 | 'branch': 'branch', |
|
947 | 'branch': 'branch', | |
948 | } |
|
948 | } | |
949 | option_name = reference_type_to_option_name.get( |
|
949 | option_name = reference_type_to_option_name.get( | |
950 | reference.type, 'revision') |
|
950 | reference.type, 'revision') | |
951 |
|
951 | |||
952 | if option_name == 'revision': |
|
952 | if option_name == 'revision': | |
953 | ref = reference.commit_id |
|
953 | ref = reference.commit_id | |
954 | else: |
|
954 | else: | |
955 | ref = reference.name |
|
955 | ref = reference.name | |
956 |
|
956 | |||
957 | options = {option_name: [ref]} |
|
957 | options = {option_name: [ref]} | |
958 | self._remote.pull_cmd(repository_path, hooks=False, **options) |
|
958 | self._remote.pull_cmd(repository_path, hooks=False, **options) | |
959 | self._remote.invalidate_vcs_cache() |
|
959 | self._remote.invalidate_vcs_cache() | |
960 |
|
960 | |||
961 | def bookmark(self, bookmark, revision=None): |
|
961 | def bookmark(self, bookmark, revision=None): | |
962 | if isinstance(bookmark, str): |
|
962 | if isinstance(bookmark, str): | |
963 | bookmark = safe_str(bookmark) |
|
963 | bookmark = safe_str(bookmark) | |
964 | self._remote.bookmark(bookmark, revision=revision) |
|
964 | self._remote.bookmark(bookmark, revision=revision) | |
965 | self._remote.invalidate_vcs_cache() |
|
965 | self._remote.invalidate_vcs_cache() | |
966 |
|
966 | |||
967 | def get_path_permissions(self, username): |
|
967 | def get_path_permissions(self, username): | |
968 | hgacl_file = os.path.join(self.path, '.hg/hgacl') |
|
968 | hgacl_file = os.path.join(self.path, '.hg/hgacl') | |
969 |
|
969 | |||
970 | def read_patterns(suffix): |
|
970 | def read_patterns(suffix): | |
971 | svalue = None |
|
971 | svalue = None | |
972 | for section, option in [ |
|
972 | for section, option in [ | |
973 | ('narrowacl', username + suffix), |
|
973 | ('narrowacl', username + suffix), | |
974 | ('narrowacl', 'default' + suffix), |
|
974 | ('narrowacl', 'default' + suffix), | |
975 | ('narrowhgacl', username + suffix), |
|
975 | ('narrowhgacl', username + suffix), | |
976 | ('narrowhgacl', 'default' + suffix) |
|
976 | ('narrowhgacl', 'default' + suffix) | |
977 | ]: |
|
977 | ]: | |
978 | try: |
|
978 | try: | |
979 | svalue = hgacl.get(section, option) |
|
979 | svalue = hgacl.get(section, option) | |
980 | break # stop at the first value we find |
|
980 | break # stop at the first value we find | |
981 | except configparser.NoOptionError: |
|
981 | except configparser.NoOptionError: | |
982 | pass |
|
982 | pass | |
983 | if not svalue: |
|
983 | if not svalue: | |
984 | return None |
|
984 | return None | |
985 | result = ['/'] |
|
985 | result = ['/'] | |
986 | for pattern in svalue.split(): |
|
986 | for pattern in svalue.split(): | |
987 | result.append(pattern) |
|
987 | result.append(pattern) | |
988 | if '*' not in pattern and '?' not in pattern: |
|
988 | if '*' not in pattern and '?' not in pattern: | |
989 | result.append(pattern + '/*') |
|
989 | result.append(pattern + '/*') | |
990 | return result |
|
990 | return result | |
991 |
|
991 | |||
992 | if os.path.exists(hgacl_file): |
|
992 | if os.path.exists(hgacl_file): | |
993 | try: |
|
993 | try: | |
994 | hgacl = configparser.RawConfigParser() |
|
994 | hgacl = configparser.RawConfigParser() | |
995 | hgacl.read(hgacl_file) |
|
995 | hgacl.read(hgacl_file) | |
996 |
|
996 | |||
997 | includes = read_patterns('.includes') |
|
997 | includes = read_patterns('.includes') | |
998 | excludes = read_patterns('.excludes') |
|
998 | excludes = read_patterns('.excludes') | |
999 | return BasePathPermissionChecker.create_from_patterns( |
|
999 | return BasePathPermissionChecker.create_from_patterns( | |
1000 | includes, excludes) |
|
1000 | includes, excludes) | |
1001 | except BaseException as e: |
|
1001 | except BaseException as e: | |
1002 | msg = 'Cannot read ACL settings from {} on {}: {}'.format( |
|
1002 | msg = 'Cannot read ACL settings from {} on {}: {}'.format( | |
1003 | hgacl_file, self.name, e) |
|
1003 | hgacl_file, self.name, e) | |
1004 | raise exceptions.RepositoryRequirementError(msg) |
|
1004 | raise exceptions.RepositoryRequirementError(msg) | |
1005 | else: |
|
1005 | else: | |
1006 | return None |
|
1006 | return None | |
1007 |
|
1007 | |||
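The `.hg/hgacl` file parsed above is plain INI; a stand-alone sketch of the layout `read_patterns` expects (user name and paths are examples) and of the user-before-default lookup order:

import configparser

HGACL_EXAMPLE = '''
[narrowacl]
jane.includes = docs src/module-a
jane.excludes = src/module-a/secrets
default.includes = docs
'''

hgacl = configparser.RawConfigParser()
hgacl.read_string(HGACL_EXAMPLE)

# Same order as read_patterns(): the user-specific option wins over default.
for option in ('jane.includes', 'default.includes'):
    try:
        print(option, '->', hgacl.get('narrowacl', option).split())
        break
    except configparser.NoOptionError:
        continue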
1008 |
|
1008 | |||
1009 | class MercurialIndexBasedCollectionGenerator(CollectionGenerator): |
|
1009 | class MercurialIndexBasedCollectionGenerator(CollectionGenerator): | |
1010 |
|
1010 | |||
1011 | def _commit_factory(self, commit_id): |
|
1011 | def _commit_factory(self, commit_id): | |
1012 | return self.repo.get_commit( |
|
1012 | return self.repo.get_commit( | |
1013 | commit_idx=commit_id, pre_load=self.pre_load) |
|
1013 | commit_idx=commit_id, pre_load=self.pre_load) |
@@ -1,253 +1,253 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2014-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2014-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | SVN commit module |
|
22 | SVN commit module | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 |
|
25 | |||
26 | import dateutil.parser |
|
26 | import dateutil.parser | |
27 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
27 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
28 |
|
28 | |||
29 | from rhodecode.lib.utils import safe_str, safe_unicode |
|
29 | from rhodecode.lib.utils import safe_str, safe_unicode | |
30 | from rhodecode.lib.vcs import nodes, path as vcspath |
|
30 | from rhodecode.lib.vcs import nodes, path as vcspath | |
31 | from rhodecode.lib.vcs.backends import base |
|
31 | from rhodecode.lib.vcs.backends import base | |
32 | from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError |
|
32 | from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | _SVN_PROP_TRUE = '*' |
|
35 | _SVN_PROP_TRUE = '*' | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | class SubversionCommit(base.BaseCommit): |
|
38 | class SubversionCommit(base.BaseCommit): | |
39 | """ |
|
39 | """ | |
40 | Subversion specific implementation of commits |
|
40 | Subversion specific implementation of commits | |
41 |
|
41 | |||
42 | .. attribute:: branch |
|
42 | .. attribute:: branch | |
43 |
|
43 | |||
44 | The Subversion backend does not support assigning branches to |
|
44 | The Subversion backend does not support assigning branches to | |
45 | specific commits. This attribute always has the value `None`. |
|
45 | specific commits. This attribute always has the value `None`. | |
46 |
|
46 | |||
47 | """ |
|
47 | """ | |
48 |
|
48 | |||
49 | def __init__(self, repository, commit_id): |
|
49 | def __init__(self, repository, commit_id): | |
50 | self.repository = repository |
|
50 | self.repository = repository | |
51 | self.idx = self.repository._get_commit_idx(commit_id) |
|
51 | self.idx = self.repository._get_commit_idx(commit_id) | |
52 | self._svn_rev = self.idx + 1 |
|
52 | self._svn_rev = self.idx + 1 | |
53 | self._remote = repository._remote |
|
53 | self._remote = repository._remote | |
54 | # TODO: handling of raw_id should be a method on repository itself, |
|
54 | # TODO: handling of raw_id should be a method on repository itself, | |
55 | # which knows how to translate commit index and commit id |
|
55 | # which knows how to translate commit index and commit id | |
56 | self.raw_id = commit_id |
|
56 | self.raw_id = commit_id | |
57 | self.short_id = commit_id |
|
57 | self.short_id = commit_id | |
58 | self.id = 'r%s' % (commit_id, ) |
|
58 | self.id = 'r%s' % (commit_id, ) | |
59 |
|
59 | |||
60 | # TODO: Implement the following placeholder attributes |
|
60 | # TODO: Implement the following placeholder attributes | |
61 | self.nodes = {} |
|
61 | self.nodes = {} | |
62 | self.tags = [] |
|
62 | self.tags = [] | |
63 |
|
63 | |||
64 | @property |
|
64 | @property | |
65 | def author(self): |
|
65 | def author(self): | |
66 | return safe_unicode(self._properties.get('svn:author')) |
|
66 | return safe_unicode(self._properties.get('svn:author')) | |
67 |
|
67 | |||
68 | @property |
|
68 | @property | |
69 | def date(self): |
|
69 | def date(self): | |
70 | return _date_from_svn_properties(self._properties) |
|
70 | return _date_from_svn_properties(self._properties) | |
71 |
|
71 | |||
72 | @property |
|
72 | @property | |
73 | def message(self): |
|
73 | def message(self): | |
74 | return safe_unicode(self._properties.get('svn:log')) |
|
74 | return safe_unicode(self._properties.get('svn:log')) | |
75 |
|
75 | |||
76 | @LazyProperty |
|
76 | @LazyProperty | |
77 | def _properties(self): |
|
77 | def _properties(self): | |
78 | return self._remote.revision_properties(self._svn_rev) |
|
78 | return self._remote.revision_properties(self._svn_rev) | |
79 |
|
79 | |||
80 | @LazyProperty |
|
80 | @LazyProperty | |
81 | def parents(self): |
|
81 | def parents(self): | |
82 | parent_idx = self.idx - 1 |
|
82 | parent_idx = self.idx - 1 | |
83 | if parent_idx >= 0: |
|
83 | if parent_idx >= 0: | |
84 | parent = self.repository.get_commit(commit_idx=parent_idx) |
|
84 | parent = self.repository.get_commit(commit_idx=parent_idx) | |
85 | return [parent] |
|
85 | return [parent] | |
86 | return [] |
|
86 | return [] | |
87 |
|
87 | |||
88 | @LazyProperty |
|
88 | @LazyProperty | |
89 | def children(self): |
|
89 | def children(self): | |
90 | child_idx = self.idx + 1 |
|
90 | child_idx = self.idx + 1 | |
91 | if child_idx < len(self.repository.commit_ids): |
|
91 | if child_idx < len(self.repository.commit_ids): | |
92 | child = self.repository.get_commit(commit_idx=child_idx) |
|
92 | child = self.repository.get_commit(commit_idx=child_idx) | |
93 | return [child] |
|
93 | return [child] | |
94 | return [] |
|
94 | return [] | |
95 |
|
95 | |||
96 | def get_file_mode(self, path): |
|
96 | def get_file_mode(self, path): | |
97 | # Note: Subversion flags files which are executable with a special |
|
97 | # Note: Subversion flags files which are executable with a special | |
98 | # property `svn:executable` which is set to the value ``"*"``. |
|
98 | # property `svn:executable` which is set to the value ``"*"``. | |
99 | if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE: |
|
99 | if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE: | |
100 | return base.FILEMODE_EXECUTABLE |
|
100 | return base.FILEMODE_EXECUTABLE | |
101 | else: |
|
101 | else: | |
102 | return base.FILEMODE_DEFAULT |
|
102 | return base.FILEMODE_DEFAULT | |
103 |
|
103 | |||
104 | def is_link(self, path): |
|
104 | def is_link(self, path): | |
105 | # Note: Subversion has a flag for special files; the content of the |
|
105 | # Note: Subversion has a flag for special files; the content of the | |
106 | # file contains the type of that file. |
|
106 | # file contains the type of that file. | |
107 | if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE: |
|
107 | if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE: | |
108 | return self.get_file_content(path).startswith('link') |
|
108 | return self.get_file_content(path).startswith('link') | |
109 | return False |
|
109 | return False | |
110 |
|
110 | |||
111 | def is_node_binary(self, path): |
|
111 | def is_node_binary(self, path): | |
112 | path = self._fix_path(path) |
|
112 | path = self._fix_path(path) | |
113 | return self._remote.is_binary(self._svn_rev, safe_str(path)) |
|
113 | return self._remote.is_binary(self._svn_rev, safe_str(path)) | |
114 |
|
114 | |||
115 | def _get_file_property(self, path, name): |
|
115 | def _get_file_property(self, path, name): | |
116 | file_properties = self._remote.node_properties( |
|
116 | file_properties = self._remote.node_properties( | |
117 | safe_str(path), self._svn_rev) |
|
117 | safe_str(path), self._svn_rev) | |
118 | return file_properties.get(name) |
|
118 | return file_properties.get(name) | |
119 |
|
119 | |||
120 | def get_file_content(self, path): |
|
120 | def get_file_content(self, path): | |
121 | path = self._fix_path(path) |
|
121 | path = self._fix_path(path) | |
122 | return self._remote.get_file_content(safe_str(path), self._svn_rev) |
|
122 | return self._remote.get_file_content(safe_str(path), self._svn_rev) | |
123 |
|
123 | |||
124 | def get_file_content_streamed(self, path): |
|
124 | def get_file_content_streamed(self, path): | |
125 | path = self._fix_path(path) |
|
125 | path = self._fix_path(path) | |
126 | stream_method = getattr(self._remote, 'stream:get_file_content') |
|
126 | stream_method = getattr(self._remote, 'stream:get_file_content') | |
127 | return stream_method(safe_str(path), self._svn_rev) |
|
127 | return stream_method(safe_str(path), self._svn_rev) | |
128 |
|
128 | |||
129 | def get_file_size(self, path): |
|
129 | def get_file_size(self, path): | |
130 | path = self._fix_path(path) |
|
130 | path = self._fix_path(path) | |
131 | return self._remote.get_file_size(safe_str(path), self._svn_rev) |
|
131 | return self._remote.get_file_size(safe_str(path), self._svn_rev) | |
132 |
|
132 | |||
133 | def get_path_history(self, path, limit=None, pre_load=None): |
|
133 | def get_path_history(self, path, limit=None, pre_load=None): | |
134 | path = safe_str(self._fix_path(path)) |
|
134 | path = safe_str(self._fix_path(path)) | |
135 | history = self._remote.node_history(path, self._svn_rev, limit) |
|
135 | history = self._remote.node_history(path, self._svn_rev, limit) | |
136 | return [ |
|
136 | return [ | |
137 | self.repository.get_commit(commit_id=str(svn_rev)) |
|
137 | self.repository.get_commit(commit_id=str(svn_rev)) | |
138 | for svn_rev in history] |
|
138 | for svn_rev in history] | |
139 |
|
139 | |||
140 | def get_file_annotate(self, path, pre_load=None): |
|
140 | def get_file_annotate(self, path, pre_load=None): | |
141 | result = self._remote.file_annotate(safe_str(path), self._svn_rev) |
|
141 | result = self._remote.file_annotate(safe_str(path), self._svn_rev) | |
142 |
|
142 | |||
143 | for zero_based_line_no, svn_rev, content in result: |
|
143 | for zero_based_line_no, svn_rev, content in result: | |
144 | commit_id = str(svn_rev) |
|
144 | commit_id = str(svn_rev) | |
145 | line_no = zero_based_line_no + 1 |
|
145 | line_no = zero_based_line_no + 1 | |
146 | yield ( |
|
146 | yield ( | |
147 | line_no, |
|
147 | line_no, | |
148 | commit_id, |
|
148 | commit_id, | |
149 | lambda: self.repository.get_commit(commit_id=commit_id), |
|
149 | lambda: self.repository.get_commit(commit_id=commit_id), | |
150 | content) |
|
150 | content) | |
151 |
|
151 | |||
152 | def get_node(self, path, pre_load=None): |
|
152 | def get_node(self, path, pre_load=None): | |
153 | path = self._fix_path(path) |
|
153 | path = self._fix_path(path) | |
154 | if path not in self.nodes: |
|
154 | if path not in self.nodes: | |
155 |
|
155 | |||
156 | if path == '': |
|
156 | if path == '': | |
157 | node = nodes.RootNode(commit=self) |
|
157 | node = nodes.RootNode(commit=self) | |
158 | else: |
|
158 | else: | |
159 | node_type = self._remote.get_node_type( |
|
159 | node_type = self._remote.get_node_type( | |
160 | safe_str(path), self._svn_rev) |
|
160 | safe_str(path), self._svn_rev) | |
161 | if node_type == 'dir': |
|
161 | if node_type == 'dir': | |
162 | node = nodes.DirNode(path, commit=self) |
|
162 | node = nodes.DirNode(path, commit=self) | |
163 | elif node_type == 'file': |
|
163 | elif node_type == 'file': | |
164 | node = nodes.FileNode(path, commit=self, pre_load=pre_load) |
|
164 | node = nodes.FileNode(path, commit=self, pre_load=pre_load) | |
165 | else: |
|
165 | else: | |
166 | raise self.no_node_at_path(path) |
|
166 | raise self.no_node_at_path(path) | |
167 |
|
167 | |||
168 | self.nodes[path] = node |
|
168 | self.nodes[path] = node | |
169 | return self.nodes[path] |
|
169 | return self.nodes[path] | |
170 |
|
170 | |||
171 | def get_nodes(self, path): |
|
171 | def get_nodes(self, path): | |
172 | if self._get_kind(path) != nodes.NodeKind.DIR: |
|
172 | if self._get_kind(path) != nodes.NodeKind.DIR: | |
173 | raise CommitError( |
|
173 | raise CommitError( | |
174 | "Directory does not exist for commit %s at " |
|
174 | "Directory does not exist for commit %s at " | |
175 | " '%s'" % (self.raw_id, path)) |
|
175 | " '%s'" % (self.raw_id, path)) | |
176 | path = safe_str(self._fix_path(path)) |
|
176 | path = safe_str(self._fix_path(path)) | |
177 |
|
177 | |||
178 | path_nodes = [] |
|
178 | path_nodes = [] | |
179 | for name, kind in self._remote.get_nodes(path, revision=self._svn_rev): |
|
179 | for name, kind in self._remote.get_nodes(path, revision=self._svn_rev): | |
180 | node_path = vcspath.join(path, name) |
|
180 | node_path = vcspath.join(path, name) | |
181 | if kind == 'dir': |
|
181 | if kind == 'dir': | |
182 | node = nodes.DirNode(node_path, commit=self) |
|
182 | node = nodes.DirNode(node_path, commit=self) | |
183 | elif kind == 'file': |
|
183 | elif kind == 'file': | |
184 | node = nodes.FileNode(node_path, commit=self) |
|
184 | node = nodes.FileNode(node_path, commit=self) | |
185 | else: |
|
185 | else: | |
186 | raise ValueError("Node kind %s not supported." % (kind, )) |
|
186 | raise ValueError("Node kind %s not supported." % (kind, )) | |
187 | self.nodes[node_path] = node |
|
187 | self.nodes[node_path] = node | |
188 | path_nodes.append(node) |
|
188 | path_nodes.append(node) | |
189 |
|
189 | |||
190 | return path_nodes |
|
190 | return path_nodes | |
191 |
|
191 | |||
192 | def _get_kind(self, path): |
|
192 | def _get_kind(self, path): | |
193 | path = self._fix_path(path) |
|
193 | path = self._fix_path(path) | |
194 | kind = self._remote.get_node_type(path, self._svn_rev) |
|
194 | kind = self._remote.get_node_type(path, self._svn_rev) | |
195 | if kind == 'file': |
|
195 | if kind == 'file': | |
196 | return nodes.NodeKind.FILE |
|
196 | return nodes.NodeKind.FILE | |
197 | elif kind == 'dir': |
|
197 | elif kind == 'dir': | |
198 | return nodes.NodeKind.DIR |
|
198 | return nodes.NodeKind.DIR | |
199 | else: |
|
199 | else: | |
200 | raise CommitError( |
|
200 | raise CommitError( | |
201 | "Node does not exist at the given path '%s'" % (path, )) |
|
201 | "Node does not exist at the given path '%s'" % (path, )) | |
202 |
|
202 | |||
203 | @LazyProperty |
|
203 | @LazyProperty | |
204 | def _changes_cache(self): |
|
204 | def _changes_cache(self): | |
205 | return self._remote.revision_changes(self._svn_rev) |
|
205 | return self._remote.revision_changes(self._svn_rev) | |
206 |
|
206 | |||
207 | @LazyProperty |
|
207 | @LazyProperty | |
208 | def affected_files(self): |
|
208 | def affected_files(self): | |
209 | changed_files = set() |
|
209 | changed_files = set() | |
210 | for files in self._changes_cache.
|
210 | for files in self._changes_cache.values(): | |
211 | changed_files.update(files) |
|
211 | changed_files.update(files) | |
212 | return list(changed_files) |
|
212 | return list(changed_files) | |
213 |
|
213 | |||
214 | @LazyProperty |
|
214 | @LazyProperty | |
215 | def id(self): |
|
215 | def id(self): | |
216 | return self.raw_id |
|
216 | return self.raw_id | |
217 |
|
217 | |||
218 | @property |
|
218 | @property | |
219 | def added(self): |
|
219 | def added(self): | |
220 | return nodes.AddedFileNodesGenerator(self.added_paths, self) |
|
220 | return nodes.AddedFileNodesGenerator(self.added_paths, self) | |
221 |
|
221 | |||
222 | @LazyProperty |
|
222 | @LazyProperty | |
223 | def added_paths(self): |
|
223 | def added_paths(self): | |
224 | return [n for n in self._changes_cache['added']] |
|
224 | return [n for n in self._changes_cache['added']] | |
225 |
|
225 | |||
226 | @property |
|
226 | @property | |
227 | def changed(self): |
|
227 | def changed(self): | |
228 | return nodes.ChangedFileNodesGenerator(self.changed_paths, self) |
|
228 | return nodes.ChangedFileNodesGenerator(self.changed_paths, self) | |
229 |
|
229 | |||
230 | @LazyProperty |
|
230 | @LazyProperty | |
231 | def changed_paths(self): |
|
231 | def changed_paths(self): | |
232 | return [n for n in self._changes_cache['changed']] |
|
232 | return [n for n in self._changes_cache['changed']] | |
233 |
|
233 | |||
234 | @property |
|
234 | @property | |
235 | def removed(self): |
|
235 | def removed(self): | |
236 | return nodes.RemovedFileNodesGenerator(self.removed_paths, self) |
|
236 | return nodes.RemovedFileNodesGenerator(self.removed_paths, self) | |
237 |
|
237 | |||
238 | @LazyProperty |
|
238 | @LazyProperty | |
239 | def removed_paths(self): |
|
239 | def removed_paths(self): | |
240 | return [n for n in self._changes_cache['removed']] |
|
240 | return [n for n in self._changes_cache['removed']] | |
241 |
|
241 | |||
242 |
|
242 | |||
243 | def _date_from_svn_properties(properties): |
|
243 | def _date_from_svn_properties(properties): | |
244 | """ |
|
244 | """ | |
245 | Parses the date out of given svn properties. |
|
245 | Parses the date out of given svn properties. | |
246 |
|
246 | |||
247 | :return: :class:`datetime.datetime` instance. The object is naive. |
|
247 | :return: :class:`datetime.datetime` instance. The object is naive. | |
248 | """ |
|
248 | """ | |
249 |
|
249 | |||
250 | aware_date = dateutil.parser.parse(properties.get('svn:date')) |
|
250 | aware_date = dateutil.parser.parse(properties.get('svn:date')) | |
251 | # final_date = aware_date.astimezone(dateutil.tz.tzlocal()) |
|
251 | # final_date = aware_date.astimezone(dateutil.tz.tzlocal()) | |
252 | final_date = aware_date |
|
252 | final_date = aware_date | |
253 | return final_date.replace(tzinfo=None) |
|
253 | return final_date.replace(tzinfo=None) |
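For context on the date handling above, here is a minimal standalone sketch (editor-added, not part of the changeset) of how an `svn:date` revision property becomes the naive datetime that `_date_from_svn_properties` returns; the property values are purely illustrative:

import dateutil.parser

# Illustrative revision properties, shaped like the mapping returned by the
# remote `revision_properties` call used by SubversionCommit above.
properties = {
    'svn:author': 'alice',
    'svn:log': 'Fix typo in README',
    'svn:date': '2020-06-01T12:30:45.123456Z',
}

# Parse the timezone-aware ISO 8601 timestamp...
aware_date = dateutil.parser.parse(properties['svn:date'])
# ...then drop the timezone info to get a naive datetime, as the module does.
naive_date = aware_date.replace(tzinfo=None)
print(naive_date)  # 2020-06-01 12:30:45.123456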
@@ -1,717 +1,716 b'' | |||||
1 |
|
1 | |||
2 | ; ######################################### |
|
2 | ; ######################################### | |
3 | ; RHODECODE COMMUNITY EDITION CONFIGURATION |
|
3 | ; RHODECODE COMMUNITY EDITION CONFIGURATION | |
4 | ; ######################################### |
|
4 | ; ######################################### | |
5 |
|
5 | |||
6 | [DEFAULT] |
|
6 | [DEFAULT] | |
7 | ; Debug flag sets all loggers to debug, and enables request tracking |
|
7 | ; Debug flag sets all loggers to debug, and enables request tracking | |
8 | debug = true |
|
8 | debug = true | |
9 |
|
9 | |||
10 | ; ######################################################################## |
|
10 | ; ######################################################################## | |
11 | ; EMAIL CONFIGURATION |
|
11 | ; EMAIL CONFIGURATION | |
12 | ; These settings will be used by the RhodeCode mailing system |
|
12 | ; These settings will be used by the RhodeCode mailing system | |
13 | ; ######################################################################## |
|
13 | ; ######################################################################## | |
14 |
|
14 | |||
15 | ; prefix all emails subjects with given prefix, helps filtering out emails |
|
15 | ; prefix all emails subjects with given prefix, helps filtering out emails | |
16 | #email_prefix = [RhodeCode] |
|
16 | #email_prefix = [RhodeCode] | |
17 |
|
17 | |||
18 | ; email FROM address all mails will be sent |
|
18 | ; email FROM address all mails will be sent | |
19 | #app_email_from = rhodecode-noreply@localhost |
|
19 | #app_email_from = rhodecode-noreply@localhost | |
20 |
|
20 | |||
21 | #smtp_server = mail.server.com |
|
21 | #smtp_server = mail.server.com | |
22 | #smtp_username = |
|
22 | #smtp_username = | |
23 | #smtp_password = |
|
23 | #smtp_password = | |
24 | #smtp_port = |
|
24 | #smtp_port = | |
25 | #smtp_use_tls = false |
|
25 | #smtp_use_tls = false | |
26 | #smtp_use_ssl = true |
|
26 | #smtp_use_ssl = true | |
27 |
|
27 | |||
28 | [server:main] |
|
28 | [server:main] | |
29 | ; COMMON HOST/IP CONFIG |
|
29 | ; COMMON HOST/IP CONFIG | |
30 | host = 0.0.0.0 |
|
30 | host = 0.0.0.0 | |
31 | port = 5000 |
|
31 | port = 5000 | |
32 |
|
32 | |||
33 |
|
33 | |||
34 | ; ########################### |
|
34 | ; ########################### | |
35 | ; GUNICORN APPLICATION SERVER |
|
35 | ; GUNICORN APPLICATION SERVER | |
36 | ; ########################### |
|
36 | ; ########################### | |
37 |
|
37 | |||
38 | ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini |
|
38 | ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini | |
39 |
|
39 | |||
40 | ; Module to use, this setting shouldn't be changed |
|
40 | ; Module to use, this setting shouldn't be changed | |
41 | use = egg:gunicorn#main |
|
41 | use = egg:gunicorn#main | |
42 |
|
42 | |||
43 | ; Sets the number of process workers. More workers means more concurrent connections |
|
43 | ; Sets the number of process workers. More workers means more concurrent connections | |
44 | ; RhodeCode can handle at the same time. Each additional worker also increases |
|
44 | ; RhodeCode can handle at the same time. Each additional worker also increases | |
45 | ; memory usage, as each has its own set of caches. |
|
45 | ; memory usage, as each has its own set of caches. | |
46 | ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more |
|
46 | ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more | |
47 | ; than 8-10 unless for really big deployments, e.g. 700-1000 users. |
|
47 | ; than 8-10 unless for really big deployments, e.g. 700-1000 users. | |
48 | ; `instance_id = *` must be set in the [app:main] section below (which is the default) |
|
48 | ; `instance_id = *` must be set in the [app:main] section below (which is the default) | |
49 | ; when using more than 1 worker. |
|
49 | ; when using more than 1 worker. | |
50 | #workers = 2 |
|
50 | #workers = 2 | |
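As a worked example of the sizing rule above (illustrative only, not part of the shipped test config): a host with 4 CPU cores gives 2 * 4 + 1 = 9 workers, which is still within the suggested 8-10 cap, so one might uncomment and set:

workers = 9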
51 |
|
51 | |||
52 | ; Gunicorn access log level |
|
52 | ; Gunicorn access log level | |
53 | #loglevel = info |
|
53 | #loglevel = info | |
54 |
|
54 | |||
55 | ; Process name visible in process list |
|
55 | ; Process name visible in process list | |
56 | #proc_name = rhodecode |
|
56 | #proc_name = rhodecode | |
57 |
|
57 | |||
58 | ; Type of worker class, one of `sync`, `gevent` |
|
58 | ; Type of worker class, one of `sync`, `gevent` | |
59 | ; Recommended type is `gevent` |
|
59 | ; Recommended type is `gevent` | |
60 | #worker_class = gevent |
|
60 | #worker_class = gevent | |
61 |
|
61 | |||
62 | ; The maximum number of simultaneous clients per worker. Valid only for gevent |
|
62 | ; The maximum number of simultaneous clients per worker. Valid only for gevent | |
63 | #worker_connections = 10 |
|
63 | #worker_connections = 10 | |
64 |
|
64 | |||
65 | ; Max number of requests that worker will handle before being gracefully restarted. |
|
65 | ; Max number of requests that worker will handle before being gracefully restarted. | |
66 | ; Prevents memory leaks, jitter adds variability so not all workers are restarted at once. |
|
66 | ; Prevents memory leaks, jitter adds variability so not all workers are restarted at once. | |
67 | #max_requests = 1000 |
|
67 | #max_requests = 1000 | |
68 | #max_requests_jitter = 30 |
|
68 | #max_requests_jitter = 30 | |
69 |
|
69 | |||
70 | ; Amount of time a worker can spend handling a request before it |
|
70 | ; Amount of time a worker can spend handling a request before it | |
71 | ; gets killed and restarted. By default set to 21600 (6hrs) |
|
71 | ; gets killed and restarted. By default set to 21600 (6hrs) | |
72 | ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) |
|
72 | ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) | |
73 | #timeout = 21600 |
|
73 | #timeout = 21600 | |
74 |
|
74 | |||
75 | ; The maximum size of HTTP request line in bytes. |
|
75 | ; The maximum size of HTTP request line in bytes. | |
76 | ; 0 for unlimited |
|
76 | ; 0 for unlimited | |
77 | #limit_request_line = 0 |
|
77 | #limit_request_line = 0 | |
78 |
|
78 | |||
79 |
|
79 | |||
80 | ; Prefix middleware for RhodeCode. |
|
80 | ; Prefix middleware for RhodeCode. | |
81 | ; recommended when using proxy setup. |
|
81 | ; recommended when using proxy setup. | |
82 | ; allows to set RhodeCode under a prefix in server. |
|
82 | ; allows to set RhodeCode under a prefix in server. | |
83 | ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. |
|
83 | ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. | |
84 | ; And set your prefix like: `prefix = /custom_prefix` |
|
84 | ; And set your prefix like: `prefix = /custom_prefix` | |
85 | ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need |
|
85 | ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need | |
86 | ; to make your cookies only work on prefix url |
|
86 | ; to make your cookies only work on prefix url | |
87 | [filter:proxy-prefix] |
|
87 | [filter:proxy-prefix] | |
88 | use = egg:PasteDeploy#prefix |
|
88 | use = egg:PasteDeploy#prefix | |
89 | prefix = / |
|
89 | prefix = / | |
90 |
|
90 | |||
91 | [app:main] |
|
91 | [app:main] | |
92 | ; The %(here)s variable will be replaced with the absolute path of parent directory |
|
92 | ; The %(here)s variable will be replaced with the absolute path of parent directory | |
93 | ; of this file |
|
93 | ; of this file | |
94 | ; Each option in the app:main can be overridden by an environment variable |
|
94 | ; Each option in the app:main can be overridden by an environment variable | |
95 | ; |
|
95 | ; | |
96 | ;To override an option: |
|
96 | ;To override an option: | |
97 | ; |
|
97 | ; | |
98 | ;RC_<KeyName> |
|
98 | ;RC_<KeyName> | |
99 | ;Everything should be uppercase, . and - should be replaced by _. |
|
99 | ;Everything should be uppercase, . and - should be replaced by _. | |
100 | ;For example, if you have these configuration settings: |
|
100 | ;For example, if you have these configuration settings: | |
101 | ;rc_cache.repo_object.backend = foo |
|
101 | ;rc_cache.repo_object.backend = foo | |
102 | ;can be overridden by |
|
102 | ;can be overridden by | |
103 | ;export RC_CACHE_REPO_OBJECT_BACKEND=foo |
|
103 | ;export RC_CACHE_REPO_OBJECT_BACKEND=foo | |
104 |
|
104 | |||
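To illustrate the override rule described above with another key from this file (assuming the documented RC_ prefix mapping; the value shown is just the default used later in this config), `vcs.server = localhost:9901` could be overridden with:

export RC_VCS_SERVER=localhost:9901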
105 | is_test = True |
|
105 | is_test = True | |
106 | use = egg:rhodecode-enterprise-ce |
|
106 | use = egg:rhodecode-enterprise-ce | |
107 |
|
107 | |||
108 | ; enable proxy prefix middleware, defined above |
|
108 | ; enable proxy prefix middleware, defined above | |
109 | #filter-with = proxy-prefix |
|
109 | #filter-with = proxy-prefix | |
110 |
|
110 | |||
111 |
|
111 | |||
112 | ## RHODECODE PLUGINS ## |
|
112 | ## RHODECODE PLUGINS ## | |
113 | rhodecode.includes = rhodecode.api |
|
113 | rhodecode.includes = rhodecode.api | |
114 |
|
114 | |||
115 | # api prefix url |
|
115 | # api prefix url | |
116 | rhodecode.api.url = /_admin/api |
|
116 | rhodecode.api.url = /_admin/api | |
117 |
|
117 | |||
118 |
|
118 | |||
119 | ## END RHODECODE PLUGINS ## |
|
119 | ## END RHODECODE PLUGINS ## | |
120 |
|
120 | |||
121 | ## encryption key used to encrypt social plugin tokens, |
|
121 | ## encryption key used to encrypt social plugin tokens, | |
122 | ## remote_urls with credentials etc, if not set it defaults to |
|
122 | ## remote_urls with credentials etc, if not set it defaults to | |
123 | ## `beaker.session.secret` |
|
123 | ## `beaker.session.secret` | |
124 | #rhodecode.encrypted_values.secret = |
|
124 | #rhodecode.encrypted_values.secret = | |
125 |
|
125 | |||
126 | ; decryption strict mode (enabled by default). It controls if decryption raises |
|
126 | ; decryption strict mode (enabled by default). It controls if decryption raises | |
127 | ; `SignatureVerificationError` in case of wrong key, or damaged encryption data. |
|
127 | ; `SignatureVerificationError` in case of wrong key, or damaged encryption data. | |
128 | #rhodecode.encrypted_values.strict = false |
|
128 | #rhodecode.encrypted_values.strict = false | |
129 |
|
129 | |||
130 | ; Pick algorithm for encryption. Either fernet (more secure) or aes (default) |
|
130 | ; Pick algorithm for encryption. Either fernet (more secure) or aes (default) | |
131 | ; fernet is safer, and we strongly recommend switching to it. |
|
131 | ; fernet is safer, and we strongly recommend switching to it. | |
132 | ; Due to backward compatibility aes is used as default. |
|
132 | ; Due to backward compatibility aes is used as default. | |
133 | #rhodecode.encrypted_values.algorithm = fernet |
|
133 | #rhodecode.encrypted_values.algorithm = fernet | |
134 |
|
134 | |||
135 | ; Return gzipped responses from RhodeCode (static files/application) |
|
135 | ; Return gzipped responses from RhodeCode (static files/application) | |
136 | gzip_responses = false |
|
136 | gzip_responses = false | |
137 |
|
137 | |||
138 | ; Auto-generate javascript routes file on startup |
|
138 | ; Auto-generate javascript routes file on startup | |
139 | generate_js_files = false |
|
139 | generate_js_files = false | |
140 |
|
140 | |||
141 | ; System global default language. |
|
141 | ; System global default language. | |
142 | ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh |
|
142 | ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh | |
143 | lang = en |
|
143 | lang = en | |
144 |
|
144 | |||
145 | ; Perform a full repository scan and import on each server start. |
|
145 | ; Perform a full repository scan and import on each server start. | |
146 | ; Setting this to true could lead to very long startup time. |
|
146 | ; Setting this to true could lead to very long startup time. | |
147 | startup.import_repos = true |
|
147 | startup.import_repos = true | |
148 |
|
148 | |||
149 | ; Uncomment and set this path to use archive download cache. |
|
149 | ; Uncomment and set this path to use archive download cache. | |
150 | ; Once enabled, generated archives will be cached at this location |
|
150 | ; Once enabled, generated archives will be cached at this location | |
151 | ; and served from the cache during subsequent requests for the same archive of |
|
151 | ; and served from the cache during subsequent requests for the same archive of | |
152 | ; the repository. |
|
152 | ; the repository. | |
153 | #archive_cache_dir = /tmp/tarballcache |
|
153 | #archive_cache_dir = /tmp/tarballcache | |
154 |
|
154 | |||
155 | ; URL at which the application is running. This is used for Bootstrapping |
|
155 | ; URL at which the application is running. This is used for Bootstrapping | |
156 | ; requests in context when no web request is available. Used in ishell, or |
|
156 | ; requests in context when no web request is available. Used in ishell, or | |
157 | ; SSH calls. Set this for events to receive proper url for SSH calls. |
|
157 | ; SSH calls. Set this for events to receive proper url for SSH calls. | |
158 | app.base_url = http://rhodecode.local |
|
158 | app.base_url = http://rhodecode.local | |
159 |
|
159 | |||
160 | ; Unique application ID. Should be a random unique string for security. |
|
160 | ; Unique application ID. Should be a random unique string for security. | |
161 | app_instance_uuid = rc-production |
|
161 | app_instance_uuid = rc-production | |
162 |
|
162 | |||
163 | ## cut off limit for large diffs (size in bytes) |
|
163 | ## cut off limit for large diffs (size in bytes) | |
164 | cut_off_limit_diff = 1024000 |
|
164 | cut_off_limit_diff = 1024000 | |
165 | cut_off_limit_file = 256000 |
|
165 | cut_off_limit_file = 256000 | |
166 |
|
166 | |||
167 | ; Use cached version of vcs repositories everywhere. Recommended to be `true` |
|
167 | ; Use cached version of vcs repositories everywhere. Recommended to be `true` | |
168 | vcs_full_cache = false |
|
168 | vcs_full_cache = false | |
169 |
|
169 | |||
170 | ; Force https in RhodeCode, fixes https redirects, assumes it's always https. |
|
170 | ; Force https in RhodeCode, fixes https redirects, assumes it's always https. | |
171 | ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache |
|
171 | ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache | |
172 | force_https = false |
|
172 | force_https = false | |
173 |
|
173 | |||
174 | ; use Strict-Transport-Security headers |
|
174 | ; use Strict-Transport-Security headers | |
175 | use_htsts = false |
|
175 | use_htsts = false | |
176 |
|
176 | |||
177 | ; Set to true if your repos are exposed using the dumb protocol |
|
177 | ; Set to true if your repos are exposed using the dumb protocol | |
178 | git_update_server_info = false |
|
178 | git_update_server_info = false | |
179 |
|
179 | |||
180 | ; RSS/ATOM feed options |
|
180 | ; RSS/ATOM feed options | |
181 | rss_cut_off_limit = 256000 |
|
181 | rss_cut_off_limit = 256000 | |
182 | rss_items_per_page = 10 |
|
182 | rss_items_per_page = 10 | |
183 | rss_include_diff = false |
|
183 | rss_include_diff = false | |
184 |
|
184 | |||
185 | ; gist URL alias, used to create nicer urls for gist. This should be an |
|
185 | ; gist URL alias, used to create nicer urls for gist. This should be an | |
186 | ; url that does rewrites to _admin/gists/{gistid}. |
|
186 | ; url that does rewrites to _admin/gists/{gistid}. | |
187 | ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal |
|
187 | ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal | |
188 | ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} |
|
188 | ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} | |
189 | gist_alias_url = |
|
189 | gist_alias_url = | |
190 |
|
190 | |||
191 | ; List of views (using glob pattern syntax) that AUTH TOKENS could be |
|
191 | ; List of views (using glob pattern syntax) that AUTH TOKENS could be | |
192 | ; used for access. |
|
192 | ; used for access. | |
193 | ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it |
|
193 | ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it | |
194 | ; came from the logged-in user who owns this authentication token. |
|
194 | ; came from the logged-in user who owns this authentication token. | |
195 | ; Additionally, @TOKEN syntax can be used to bind the view to a specific |
|
195 | ; Additionally, @TOKEN syntax can be used to bind the view to a specific | |
196 | ; authentication token. Such a view would only be accessible when used together |
|
196 | ; authentication token. Such a view would only be accessible when used together | |
197 | ; with this authentication token |
|
197 | ; with this authentication token | |
198 | ; list of all views can be found under `/_admin/permissions/auth_token_access` |
|
198 | ; list of all views can be found under `/_admin/permissions/auth_token_access` | |
199 | ; The list should be "," separated and on a single line. |
|
199 | ; The list should be "," separated and on a single line. | |
200 | ; Most common views to enable: |
|
200 | ; Most common views to enable: | |
201 |
|
201 | |||
202 | # RepoCommitsView:repo_commit_download |
|
202 | # RepoCommitsView:repo_commit_download | |
203 | # RepoCommitsView:repo_commit_patch |
|
203 | # RepoCommitsView:repo_commit_patch | |
204 | # RepoCommitsView:repo_commit_raw |
|
204 | # RepoCommitsView:repo_commit_raw | |
205 | # RepoCommitsView:repo_commit_raw@TOKEN |
|
205 | # RepoCommitsView:repo_commit_raw@TOKEN | |
206 | # RepoFilesView:repo_files_diff |
|
206 | # RepoFilesView:repo_files_diff | |
207 | # RepoFilesView:repo_archivefile |
|
207 | # RepoFilesView:repo_archivefile | |
208 | # RepoFilesView:repo_file_raw |
|
208 | # RepoFilesView:repo_file_raw | |
209 | # GistView:* |
|
209 | # GistView:* | |
210 | api_access_controllers_whitelist = |
|
210 | api_access_controllers_whitelist = | |
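Purely as an illustration of the whitelist syntax described above (this test config leaves the value empty), enabling raw commit access and all gist views would look like:

api_access_controllers_whitelist = RepoCommitsView:repo_commit_raw, GistView:*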
211 |
|
211 | |||
212 | ; Default encoding used to convert from and to unicode |
|
212 | ; Default encoding used to convert from and to unicode | |
213 | ; can be also a comma separated list of encoding in case of mixed encodings |
|
213 | ; can be also a comma separated list of encoding in case of mixed encodings | |
214 | default_encoding = UTF-8 |
|
214 | default_encoding = UTF-8 | |
215 |
|
215 | |||
216 | ; instance-id prefix |
|
216 | ; instance-id prefix | |
217 | ; a prefix key for this instance used for cache invalidation when running |
|
217 | ; a prefix key for this instance used for cache invalidation when running | |
218 | ; multiple instances of RhodeCode, make sure it's globally unique for |
|
218 | ; multiple instances of RhodeCode, make sure it's globally unique for | |
219 | ; all running RhodeCode instances. Leave empty if you don't use it |
|
219 | ; all running RhodeCode instances. Leave empty if you don't use it | |
220 | instance_id = |
|
220 | instance_id = | |
221 |
|
221 | |||
222 | ; Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
222 | ; Fallback authentication plugin. Set this to a plugin ID to force the usage | |
223 | ; of an authentication plugin even if it is disabled by its settings. |
|
223 | ; of an authentication plugin even if it is disabled by its settings. | |
224 | ; This could be useful if you are unable to log in to the system due to broken |
|
224 | ; This could be useful if you are unable to log in to the system due to broken | |
225 | ; authentication settings. Then you can enable e.g. the internal RhodeCode auth |
|
225 | ; authentication settings. Then you can enable e.g. the internal RhodeCode auth | |
226 | ; module to log in again and fix the settings. |
|
226 | ; module to log in again and fix the settings. | |
227 | ; Available builtin plugin IDs (hash is part of the ID): |
|
227 | ; Available builtin plugin IDs (hash is part of the ID): | |
228 | ; egg:rhodecode-enterprise-ce#rhodecode |
|
228 | ; egg:rhodecode-enterprise-ce#rhodecode | |
229 | ; egg:rhodecode-enterprise-ce#pam |
|
229 | ; egg:rhodecode-enterprise-ce#pam | |
230 | ; egg:rhodecode-enterprise-ce#ldap |
|
230 | ; egg:rhodecode-enterprise-ce#ldap | |
231 | ; egg:rhodecode-enterprise-ce#jasig_cas |
|
231 | ; egg:rhodecode-enterprise-ce#jasig_cas | |
232 | ; egg:rhodecode-enterprise-ce#headers |
|
232 | ; egg:rhodecode-enterprise-ce#headers | |
233 | ; egg:rhodecode-enterprise-ce#crowd |
|
233 | ; egg:rhodecode-enterprise-ce#crowd | |
234 |
|
234 | |||
235 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
235 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode | |
236 |
|
236 | |||
237 | ; Flag to control loading of legacy plugins in py:/path format |
|
237 | ; Flag to control loading of legacy plugins in py:/path format | |
238 | auth_plugin.import_legacy_plugins = true |
|
238 | auth_plugin.import_legacy_plugins = true | |
239 |
|
239 | |||
240 | ; alternative return HTTP header for failed authentication. Default HTTP |
|
240 | ; alternative return HTTP header for failed authentication. Default HTTP | |
241 | ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with |
|
241 | ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with | |
242 | ; handling that causing a series of failed authentication calls. |
|
242 | ; handling that causing a series of failed authentication calls. | |
243 | ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code |
|
243 | ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code | |
244 | ; This will be served instead of default 401 on bad authentication |
|
244 | ; This will be served instead of default 401 on bad authentication | |
245 | auth_ret_code = |
|
245 | auth_ret_code = | |
246 |
|
246 | |||
247 | ; use special detection method when serving auth_ret_code, instead of serving |
|
247 | ; use special detection method when serving auth_ret_code, instead of serving | |
248 | ; ret_code directly, use 401 initially (Which triggers credentials prompt) |
|
248 | ; ret_code directly, use 401 initially (Which triggers credentials prompt) | |
249 | ; and then serve auth_ret_code to clients |
|
249 | ; and then serve auth_ret_code to clients | |
250 | auth_ret_code_detection = false |
|
250 | auth_ret_code_detection = false | |
251 |
|
251 | |||
252 | ; locking return code. When repository is locked return this HTTP code. 2XX |
|
252 | ; locking return code. When repository is locked return this HTTP code. 2XX | |
253 | ; codes don't break the transactions while 4XX codes do |
|
253 | ; codes don't break the transactions while 4XX codes do | |
254 | lock_ret_code = 423 |
|
254 | lock_ret_code = 423 | |
255 |
|
255 | |||
256 | ; allows to change the repository location in settings page |
|
256 | ; allows to change the repository location in settings page | |
257 | allow_repo_location_change = true |
|
257 | allow_repo_location_change = true | |
258 |
|
258 | |||
259 | ; allows to setup custom hooks in settings page |
|
259 | ; allows to setup custom hooks in settings page | |
260 | allow_custom_hooks_settings = true |
|
260 | allow_custom_hooks_settings = true | |
261 |
|
261 | |||
262 | ## generated license token, goto license page in RhodeCode settings to obtain |
|
262 | ## generated license token, goto license page in RhodeCode settings to obtain | |
263 | ## new token |
|
263 | ## new token | |
264 | license_token = abra-cada-bra1-rce3 |
|
264 | license_token = abra-cada-bra1-rce3 | |
265 |
|
265 | |||
266 | ## supervisor connection uri, for managing supervisor and logs. |
|
266 | ## supervisor connection uri, for managing supervisor and logs. | |
267 | supervisor.uri = |
|
267 | supervisor.uri = | |
268 | ## supervisord group name/id we only want this RC instance to handle |
|
268 | ## supervisord group name/id we only want this RC instance to handle | |
269 | supervisor.group_id = dev |
|
269 | supervisor.group_id = dev | |
270 |
|
270 | |||
271 | ## Display extended labs settings |
|
271 | ## Display extended labs settings | |
272 | labs_settings_active = true |
|
272 | labs_settings_active = true | |
273 |
|
273 | |||
274 | ; Custom exception store path, defaults to TMPDIR |
|
274 | ; Custom exception store path, defaults to TMPDIR | |
275 | ; This is used to store exceptions from RhodeCode in a shared directory |
|
275 | ; This is used to store exceptions from RhodeCode in a shared directory | |
276 | #exception_tracker.store_path = |
|
276 | #exception_tracker.store_path = | |
277 |
|
277 | |||
278 | ; Send email with exception details when it happens |
|
278 | ; Send email with exception details when it happens | |
279 | #exception_tracker.send_email = false |
|
279 | #exception_tracker.send_email = false | |
280 |
|
280 | |||
281 | ; Comma separated list of recipients for exception emails, |
|
281 | ; Comma separated list of recipients for exception emails, | |
282 | ; e.g admin@rhodecode.com,devops@rhodecode.com |
|
282 | ; e.g admin@rhodecode.com,devops@rhodecode.com | |
283 | ; Can be left empty, then emails will be sent to ALL super-admins |
|
283 | ; Can be left empty, then emails will be sent to ALL super-admins | |
284 | #exception_tracker.send_email_recipients = |
|
284 | #exception_tracker.send_email_recipients = | |
285 |
|
285 | |||
286 | ; optional prefix to Add to email Subject |
|
286 | ; optional prefix to Add to email Subject | |
287 | #exception_tracker.email_prefix = [RHODECODE ERROR] |
|
287 | #exception_tracker.email_prefix = [RHODECODE ERROR] | |
288 |
|
288 | |||
289 | ; File store configuration. This is used to store and serve uploaded files |
|
289 | ; File store configuration. This is used to store and serve uploaded files | |
290 | file_store.enabled = true |
|
290 | file_store.enabled = true | |
291 |
|
291 | |||
292 | ; Storage backend, available options are: local |
|
292 | ; Storage backend, available options are: local | |
293 | file_store.backend = local |
|
293 | file_store.backend = local | |
294 |
|
294 | |||
295 | ; path to store the uploaded binaries |
|
295 | ; path to store the uploaded binaries | |
296 | file_store.storage_path = %(here)s/data/file_store |
|
296 | file_store.storage_path = %(here)s/data/file_store | |
297 |
|
297 | |||
298 |
|
298 | |||
299 | ; ############# |
|
299 | ; ############# | |
300 | ; CELERY CONFIG |
|
300 | ; CELERY CONFIG | |
301 | ; ############# |
|
301 | ; ############# | |
302 |
|
302 | |||
303 | ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini |
|
303 | ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini | |
304 |
|
304 | |||
305 | use_celery = false |
|
305 | use_celery = false | |
306 |
|
306 | |||
307 | ; path to store schedule database |
|
307 | ; path to store schedule database | |
308 | #celerybeat-schedule.path = |
|
308 | #celerybeat-schedule.path = | |
309 |
|
309 | |||
310 | ; connection url to the message broker (default redis) |
|
310 | ; connection url to the message broker (default redis) | |
311 | celery.broker_url = redis://localhost:6379/8 |
|
311 | celery.broker_url = redis://localhost:6379/8 | |
312 |
|
312 | |||
313 | ; rabbitmq example |
|
313 | ; rabbitmq example | |
314 | #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost |
|
314 | #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost | |
315 |
|
315 | |||
316 | ; maximum tasks to execute before worker restart |
|
316 | ; maximum tasks to execute before worker restart | |
317 | celery.max_tasks_per_child = 20 |
|
317 | celery.max_tasks_per_child = 20 | |
318 |
|
318 | |||
319 | ; tasks will never be sent to the queue, but executed locally instead. |
|
319 | ; tasks will never be sent to the queue, but executed locally instead. | |
320 | celery.task_always_eager = false |
|
320 | celery.task_always_eager = false | |
321 |
|
321 | |||
322 | ; ############# |
|
322 | ; ############# | |
323 | ; DOGPILE CACHE |
|
323 | ; DOGPILE CACHE | |
324 | ; ############# |
|
324 | ; ############# | |
325 |
|
325 | |||
326 | ; Default cache dir for caches. Putting this into a ramdisk can boost performance. |
|
326 | ; Default cache dir for caches. Putting this into a ramdisk can boost performance. | |
327 | ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space |
|
327 | ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space | |
328 | cache_dir = %(here)s/data |
|
328 | cache_dir = %(here)s/data | |
329 |
|
329 | |||
330 | ## locking and default file storage for Beaker. Putting this into a ramdisk |
|
330 | ## locking and default file storage for Beaker. Putting this into a ramdisk | |
331 | ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data |
|
331 | ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data | |
332 | beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data |
|
332 | beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data | |
333 | beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock |
|
333 | beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock | |
334 |
|
334 | |||
335 | beaker.cache.regions = long_term |
|
335 | beaker.cache.regions = long_term | |
336 |
|
336 | |||
337 | beaker.cache.long_term.type = memory |
|
337 | beaker.cache.long_term.type = memory | |
338 | beaker.cache.long_term.expire = 36000 |
|
338 | beaker.cache.long_term.expire = 36000 | |
339 | beaker.cache.long_term.key_length = 256 |
|
339 | beaker.cache.long_term.key_length = 256 | |
340 |
|
340 | |||
341 |
|
341 | |||
342 | ##################################### |
|
342 | ##################################### | |
343 | ### DOGPILE CACHE #### |
|
343 | ### DOGPILE CACHE #### | |
344 | ##################################### |
|
344 | ##################################### | |
345 |
|
345 | |||
346 | ## permission tree cache settings |
|
346 | ## permission tree cache settings | |
347 | rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace |
|
347 | rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace | |
348 | rc_cache.cache_perms.expiration_time = 0 |
|
348 | rc_cache.cache_perms.expiration_time = 0 | |
349 | rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1 |
|
349 | rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1 | |
350 |
|
350 | |||
351 |
|
351 | |||
352 | ## cache settings for SQL queries |
|
352 | ## cache settings for SQL queries | |
353 | rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru |
|
353 | rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru | |
354 | rc_cache.sql_cache_short.expiration_time = 0 |
|
354 | rc_cache.sql_cache_short.expiration_time = 0 | |
355 |
|
355 | |||
356 |
|
356 | |||
357 | ; ############## |
|
357 | ; ############## | |
358 | ; BEAKER SESSION |
|
358 | ; BEAKER SESSION | |
359 | ; ############## |
|
359 | ; ############## | |
360 |
|
360 | |||
361 | ; beaker.session.type is type of storage options for the logged users sessions. Current allowed |
|
361 | ; beaker.session.type is type of storage options for the logged users sessions. Current allowed | |
362 | ; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified). |
|
362 | ; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified). | |
363 | ; Fastest ones are Redis and ext:database |
|
363 | ; Fastest ones are Redis and ext:database | |
364 | beaker.session.type = file |
|
364 | beaker.session.type = file | |
365 | beaker.session.data_dir = %(here)s/rc/data/sessions/data |
|
365 | beaker.session.data_dir = %(here)s/rc/data/sessions/data | |
366 |
|
366 | |||
367 | ; Redis based sessions |
|
367 | ; Redis based sessions | |
368 | #beaker.session.type = ext:redis |
|
368 | #beaker.session.type = ext:redis | |
369 | #beaker.session.url = redis://127.0.0.1:6379/2 |
|
369 | #beaker.session.url = redis://127.0.0.1:6379/2 | |
370 |
|
370 | |||
371 | ; DB based session, fast, and allows easy management over logged in users |
|
371 | ; DB based session, fast, and allows easy management over logged in users | |
372 | #beaker.session.type = ext:database |
|
372 | #beaker.session.type = ext:database | |
373 | #beaker.session.table_name = db_session |
|
373 | #beaker.session.table_name = db_session | |
374 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode |
|
374 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode | |
375 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode |
|
375 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode | |
376 | #beaker.session.sa.pool_recycle = 3600 |
|
376 | #beaker.session.sa.pool_recycle = 3600 | |
377 | #beaker.session.sa.echo = false |
|
377 | #beaker.session.sa.echo = false | |
378 |
|
378 | |||
379 | beaker.session.key = rhodecode |
|
379 | beaker.session.key = rhodecode | |
380 | beaker.session.secret = test-rc-uytcxaz |
|
380 | beaker.session.secret = test-rc-uytcxaz | |
381 | beaker.session.lock_dir = %(here)s/rc/data/sessions/lock |
|
381 | beaker.session.lock_dir = %(here)s/rc/data/sessions/lock | |
382 |
|
382 | |||
383 | ; Secure encrypted cookie. Requires AES and AES python libraries |
|
383 | ; Secure encrypted cookie. Requires AES and AES python libraries | |
384 | ; you must disable beaker.session.secret to use this |
|
384 | ; you must disable beaker.session.secret to use this | |
385 | #beaker.session.encrypt_key = key_for_encryption |
|
385 | #beaker.session.encrypt_key = key_for_encryption | |
386 | #beaker.session.validate_key = validation_key |
|
386 | #beaker.session.validate_key = validation_key | |
387 |
|
387 | |||
388 | ; Sets the session as invalid (also logging out the user) if it has not been |
|
388 | ; Sets the session as invalid (also logging out the user) if it has not been | |
389 | ; accessed for given amount of time in seconds |
|
389 | ; accessed for given amount of time in seconds | |
390 | beaker.session.timeout = 2592000 |
|
390 | beaker.session.timeout = 2592000 | |
391 | beaker.session.httponly = true |
|
391 | beaker.session.httponly = true | |
392 |
|
392 | |||
393 | ; Path to use for the cookie. Set to prefix if you use prefix middleware |
|
393 | ; Path to use for the cookie. Set to prefix if you use prefix middleware | |
394 | #beaker.session.cookie_path = /custom_prefix |
|
394 | #beaker.session.cookie_path = /custom_prefix | |
395 |
|
395 | |||
396 | ; Set https secure cookie |
|
396 | ; Set https secure cookie | |
397 | beaker.session.secure = false |
|
397 | beaker.session.secure = false | |
398 |
|
398 | |||
399 | ## auto save the session to not to use .save() |
|
399 | ## auto save the session to not to use .save() | |
400 | beaker.session.auto = false |
|
400 | beaker.session.auto = false | |
401 |
|
401 | |||
402 | ; default cookie expiration time in seconds, set to `true` to set expire |
|
402 | ; default cookie expiration time in seconds, set to `true` to set expire | |
403 | ; at browser close |
|
403 | ; at browser close | |
404 | #beaker.session.cookie_expires = 3600 |
|
404 | #beaker.session.cookie_expires = 3600 | |
405 |
|
405 | |||
406 | ; ############################# |
|
406 | ; ############################# | |
407 | ; SEARCH INDEXING CONFIGURATION |
|
407 | ; SEARCH INDEXING CONFIGURATION | |
408 | ; ############################# |
|
408 | ; ############################# | |
409 |
|
409 | |||
410 | ; Full text search indexer is available in rhodecode-tools under |
|
410 | ; Full text search indexer is available in rhodecode-tools under | |
411 | ; `rhodecode-tools index` command |
|
411 | ; `rhodecode-tools index` command | |
412 |
|
412 | |||
413 | ; WHOOSH Backend, doesn't require additional services to run |
|
413 | ; WHOOSH Backend, doesn't require additional services to run | |
414 | ; it works well with a few dozen repos |
|
414 | ; it works well with a few dozen repos | |
415 | search.module = rhodecode.lib.index.whoosh |
|
415 | search.module = rhodecode.lib.index.whoosh | |
416 | search.location = %(here)s/data/index |
|
416 | search.location = %(here)s/data/index | |
417 |
|
417 | |||
418 | ; #################### |
|
418 | ; #################### | |
419 | ; CHANNELSTREAM CONFIG |
|
419 | ; CHANNELSTREAM CONFIG | |
420 | ; #################### |
|
420 | ; #################### | |
421 |
|
421 | |||
422 | ; channelstream enables persistent connections and live notification |
|
422 | ; channelstream enables persistent connections and live notification | |
423 | ; in the system. It's also used by the chat system |
|
423 | ; in the system. It's also used by the chat system | |
424 |
|
424 | |||
425 | channelstream.enabled = false |
|
425 | channelstream.enabled = false | |
426 |
|
426 | |||
427 | ; server address for channelstream server on the backend |
|
427 | ; server address for channelstream server on the backend | |
428 | channelstream.server = 127.0.0.1:9800 |
|
428 | channelstream.server = 127.0.0.1:9800 | |
429 |
|
429 | |||
430 | ; location of the channelstream server from outside world |
|
430 | ; location of the channelstream server from outside world | |
431 | ; use ws:// for http or wss:// for https. This address needs to be handled |
|
431 | ; use ws:// for http or wss:// for https. This address needs to be handled | |
432 | ; by external HTTP server such as Nginx or Apache |
|
432 | ; by external HTTP server such as Nginx or Apache | |
433 | ; see Nginx/Apache configuration examples in our docs |
|
433 | ; see Nginx/Apache configuration examples in our docs | |
434 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream |
|
434 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream | |
435 | channelstream.secret = secret |
|
435 | channelstream.secret = secret | |
436 | channelstream.history.location = %(here)s/channelstream_history |
|
436 | channelstream.history.location = %(here)s/channelstream_history | |
437 |
|
437 | |||
438 | ; Internal application path that Javascript uses to connect into. |
|
438 | ; Internal application path that Javascript uses to connect into. | |
439 | ; If you use proxy-prefix the prefix should be added before /_channelstream |
|
439 | ; If you use proxy-prefix the prefix should be added before /_channelstream | |
440 | channelstream.proxy_path = /_channelstream |
|
440 | channelstream.proxy_path = /_channelstream | |
441 |
|
441 | |||
442 |
|
442 | |||
443 | ; ############################## |
|
443 | ; ############################## | |
444 | ; MAIN RHODECODE DATABASE CONFIG |
|
444 | ; MAIN RHODECODE DATABASE CONFIG | |
445 | ; ############################## |
|
445 | ; ############################## | |
446 |
|
446 | |||
447 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 |
|
447 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 | |
448 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
448 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode | |
449 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8 |
|
449 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8 | |
450 | ; pymysql is an alternative driver for MySQL, use in case of problems with default one |
|
450 | ; pymysql is an alternative driver for MySQL, use in case of problems with default one | |
451 | #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode |
|
451 | #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode | |
452 |
|
452 | |||
453 | sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30 |
|
453 | sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30 | |
454 |
|
454 | |||
455 | ; see sqlalchemy docs for other advanced settings |
|
455 | ; see sqlalchemy docs for other advanced settings | |
456 | ; print the sql statements to output |
|
456 | ; print the sql statements to output | |
457 | sqlalchemy.db1.echo = false |
|
457 | sqlalchemy.db1.echo = false | |
458 |
|
458 | |||
459 | ; recycle the connections after this amount of seconds |
|
459 | ; recycle the connections after this amount of seconds | |
460 | sqlalchemy.db1.pool_recycle = 3600 |
|
460 | sqlalchemy.db1.pool_recycle = 3600 | |
461 | sqlalchemy.db1.convert_unicode = true |
|
|||
462 |
|
461 | |||
463 | ; the number of connections to keep open inside the connection pool. |
|
462 | ; the number of connections to keep open inside the connection pool. | |
464 | ; 0 indicates no limit |
|
463 | ; 0 indicates no limit | |
465 | #sqlalchemy.db1.pool_size = 5 |
|
464 | #sqlalchemy.db1.pool_size = 5 | |
466 |
|
465 | |||
467 | ; The number of connections to allow in connection pool "overflow", that is |
|
466 | ; The number of connections to allow in connection pool "overflow", that is | |
468 | ; connections that can be opened above and beyond the pool_size setting, |
|
467 | ; connections that can be opened above and beyond the pool_size setting, | |
469 | ; which defaults to five. |
|
468 | ; which defaults to five. | |
470 | #sqlalchemy.db1.max_overflow = 10 |
|
469 | #sqlalchemy.db1.max_overflow = 10 | |
471 |
|
470 | |||
472 | ; Connection check ping, used to detect broken database connections |
|
471 | ; Connection check ping, used to detect broken database connections | |
473 | ; could be enabled to better handle cases if MySQL has gone away errors |
|
472 | ; could be enabled to better handle cases if MySQL has gone away errors | |
474 | #sqlalchemy.db1.ping_connection = true |
|
473 | #sqlalchemy.db1.ping_connection = true | |
475 |
|
474 | |||
; ##########
; VCS CONFIG
; ##########
vcs.server.enable = true
vcs.server = localhost:9901

; Web server connectivity protocol, responsible for web based VCS operations
; Available protocols are:
; `http` - use http-rpc backend (default)
vcs.server.protocol = http

; Push/Pull operations protocol, available options are:
; `http` - use http-rpc backend (default)
vcs.scm_app_implementation = http

; Push/Pull operations hooks protocol, available options are:
; `http` - use http-rpc backend (default)
vcs.hooks.protocol = http

; Host on which this instance is listening for hooks. If the vcsserver is in
; another location, this should be adjusted.
vcs.hooks.host = *

; Start VCSServer with this instance as a subprocess, useful for development
vcs.start_server = false

; List of enabled VCS backends, available options are:
; `hg` - mercurial
; `git` - git
; `svn` - subversion
vcs.backends = hg, git, svn

; Wait this number of seconds before killing the connection to the vcsserver
vcs.connection_timeout = 3600

; Compatibility version when creating SVN repositories. Defaults to the newest version when commented out.
; Set a numeric version for your current SVN, e.g. 1.8 or 1.12
; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
#vcs.svn.compatible_version = 1.8

; Cache flag to cache vcsserver remote calls locally
; It uses cache_region `cache_repo`
vcs.methods.cache = false

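; For illustration only: to run without Subversion support, the enabled backends
; could be restricted to Mercurial and Git, e.g.
#vcs.backends = hg, git
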
; ####################################################
; Subversion proxy support (mod_dav_svn)
; Maps RhodeCode repo groups into SVN paths for Apache
; ####################################################

; Enable or disable the config file generation.
svn.proxy.generate_config = false

; Generate config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true

; Set location and file name of the generated config file.
svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf

; Alternative mod_dav config template. This needs to be a valid Mako template.
; An example template can be found in the source code:
; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
#svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako

; Used as a prefix to the `Location` block in the generated config file.
; In most cases it should be set to `/`.
svn.proxy.location_root = /

; Command to reload the mod_dav_svn configuration on change.
; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
; Make sure the user who runs the RhodeCode process is allowed to reload Apache.
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload

; If the timeout expires before the reload command finishes, the command will
; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
#svn.proxy.reload_timeout = 10

; ####################
; SSH Support Settings
; ####################

; Defines if a custom authorized_keys file should be created and written on
; any change of user SSH keys. Setting this to false also disables the ability
; for users to add SSH keys from the web interface. Super admins can still
; manage SSH keys.
ssh.generate_authorized_keyfile = true

; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
# ssh.authorized_keys_ssh_opts =

; Path to the authorized_keys file where the generated entries are placed.
; It is possible to have multiple key files specified in `sshd_config`, e.g.
; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode

; Command to execute the SSH wrapper. The binary is available in the
; RhodeCode installation directory.
; e.g. ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper

; Allow shell when executing the ssh-wrapper command
ssh.wrapper_cmd_allow_shell = false

; Enables logging and detailed output sent back to the client during SSH
; operations. Useful for debugging, shouldn't be used in production.
ssh.enable_debug_logging = false

; Paths to binary executables; by default these are just the names, but they
; can be overridden to use custom ones
ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve

; Enables the SSH key generator web interface. Disabling this still allows users
; to add their own keys.
ssh.enable_ui_key_generator = true

; Statsd client config, this is used to send metrics to statsd
; We recommend setting up statsd_exporter and scraping the metrics with Prometheus
#statsd.enabled = false
#statsd.statsd_host = 0.0.0.0
#statsd.statsd_port = 8125
#statsd.statsd_prefix =
#statsd.statsd_ipv6 = false

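; For illustration only (host, port and prefix are placeholders): metrics could be
; sent to a local statsd or statsd_exporter instance by enabling the block above:
#statsd.enabled = true
#statsd.statsd_host = 127.0.0.1
#statsd.statsd_port = 8125
#statsd.statsd_prefix = rhodecode
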
; Configure logging automatically at server startup. Set to false
; to use the custom logging config below.
; The RC_LOGGING_FORMATTER and RC_LOGGING_LEVEL environment variables
; can control the logging settings when autoconfigure is enabled.

logging.autoconfigure = false

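; For illustration only (the values are assumptions, not documented defaults):
; with autoconfigure enabled, the environment variables mentioned above could be
; set before starting the server, e.g. RC_LOGGING_LEVEL=DEBUG and
; RC_LOGGING_FORMATTER=json.
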
; specify your own custom logging config file to configure logging
#logging.logging_conf_file = /path/to/custom_logging.ini

; Dummy marker to add new entries after.
; Add any custom entries below. Please don't remove this marker.
custom.conf = 1


; #####################
; LOGGING CONFIGURATION
; #####################

[loggers]
keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper

[handlers]
keys = console, console_sql

[formatters]
keys = generic, json, color_formatter, color_formatter_sql

; #######
; LOGGERS
; #######
[logger_root]
level = NOTSET
handlers = console

[logger_routes]
level = DEBUG
handlers =
qualname = routes.middleware
; "level = DEBUG" logs the route matched and routing variables.
propagate = 1

[logger_sqlalchemy]
level = INFO
handlers = console_sql
qualname = sqlalchemy.engine
propagate = 0

[logger_beaker]
level = DEBUG
handlers =
qualname = beaker.container
propagate = 1

[logger_rhodecode]
level = DEBUG
handlers =
qualname = rhodecode
propagate = 1

[logger_ssh_wrapper]
level = DEBUG
handlers =
qualname = ssh_wrapper
propagate = 1

[logger_celery]
level = DEBUG
handlers =
qualname = celery


; ########
; HANDLERS
; ########

[handler_console]
class = StreamHandler
args = (sys.stderr, )
level = DEBUG
; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
; This allows sending properly formatted logs to Grafana Loki or Elasticsearch
formatter = generic

[handler_console_sql]
; "level = DEBUG" logs SQL queries and results.
; "level = INFO" logs SQL queries.
; "level = WARN" logs neither. (Recommended for production systems.)
class = StreamHandler
args = (sys.stderr, )
level = WARN
; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
; This allows sending properly formatted logs to Grafana Loki or Elasticsearch
formatter = generic

; ##########
; FORMATTERS
; ##########

[formatter_generic]
class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter]
class = rhodecode.lib.logging_formatter.ColorFormatter
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter_sql]
class = rhodecode.lib.logging_formatter.ColorFormatterSql
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_json]
format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
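
; For illustration only: as the notes in the handler sections above describe,
; JSON log output could be enabled by switching a handler to the `json` formatter
; defined here, e.g. setting `formatter = json` in [handler_console].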