Show More
@@ -0,0 +1,33 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | ||||
|
def includeme(config):
    """Register the autocomplete-data routes and scan for view config."""
    autocomplete_routes = (
        ('user_autocomplete_data', '/_users'),
        ('user_group_autocomplete_data', '/_user_groups'),
    )
    for route_name, pattern in autocomplete_routes:
        config.add_route(name=route_name, pattern=pattern)

    # Scan module for configuration decorators.
    config.scan()
@@ -0,0 +1,19 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
@@ -0,0 +1,112 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | import json | |||
|
22 | import pytest | |||
|
23 | ||||
|
24 | from rhodecode.tests import TestController | |||
|
25 | from rhodecode.tests.fixture import Fixture | |||
|
26 | ||||
|
27 | ||||
|
28 | fixture = Fixture() | |||
|
29 | ||||
|
30 | ||||
|
def route_path(name, params=None, **kwargs):
    """Resolve a test route *name* to its URL path.

    ``kwargs`` are substituted into the route pattern via ``str.format``;
    ``params``, when given, is appended as a urlencoded query string.
    """
    # py2/py3 compatibility: urlencode moved to urllib.parse in Python 3;
    # the original `urllib.urlencode` attribute only exists on Python 2.
    try:
        from urllib import urlencode
    except ImportError:
        from urllib.parse import urlencode

    base_url = {
        'user_autocomplete_data': '/_users',
        'user_group_autocomplete_data': '/_user_groups'
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urlencode(params))
    return base_url
|
42 | ||||
|
43 | ||||
|
class TestUserAutocompleteData(TestController):
    """Functional tests for the ``user_autocomplete_data`` JSON endpoint."""

    def test_returns_list_of_users(self, user_util, xhr_header):
        # A freshly created active user must appear among the suggestions.
        self.log_user()
        user = user_util.create_user(active=True)
        user_name = user.username
        response = self.app.get(
            route_path('user_autocomplete_data'),
            extra_environ=xhr_header, status=200)
        result = json.loads(response.body)
        values = [suggestion['value'] for suggestion in result['suggestions']]
        assert user_name in values

    def test_returns_inactive_users_when_active_flag_sent(
            self, user_util, xhr_header):
        # active='0' should include inactive users; active='1' should not.
        self.log_user()
        user = user_util.create_user(active=False)
        user_name = user.username

        response = self.app.get(
            route_path('user_autocomplete_data',
                       params=dict(user_groups='true', active='0')),
            extra_environ=xhr_header, status=200)
        result = json.loads(response.body)
        values = [suggestion['value'] for suggestion in result['suggestions']]
        assert user_name in values

        response = self.app.get(
            route_path('user_autocomplete_data',
                       params=dict(user_groups='true', active='1')),
            extra_environ=xhr_header, status=200)
        result = json.loads(response.body)
        values = [suggestion['value'] for suggestion in result['suggestions']]
        assert user_name not in values

    def test_returns_groups_when_user_groups_flag_sent(
            self, user_util, xhr_header):
        # user_groups='true' mixes user groups into the user suggestions.
        self.log_user()
        group = user_util.create_user_group(user_groups_active=True)
        group_name = group.users_group_name
        response = self.app.get(
            route_path('user_autocomplete_data',
                       params=dict(user_groups='true')),
            extra_environ=xhr_header, status=200)
        result = json.loads(response.body)
        values = [suggestion['value'] for suggestion in result['suggestions']]
        assert group_name in values

    @pytest.mark.parametrize('query, count', [
        ('hello1', 0),
        ('dev', 2),
    ])
    def test_result_is_limited_when_query_is_sent(self, user_util, xhr_header,
                                                  query, count):
        # 'dev' matches both the created user and the created user group
        # (count == 2); an unrelated query matches nothing (count == 0).
        self.log_user()

        user_util._test_name = 'dev-test'
        user_util.create_user()

        user_util._test_name = 'dev-group-test'
        user_util.create_user_group()

        response = self.app.get(
            route_path('user_autocomplete_data',
                       params=dict(user_groups='true', query=query)),
            extra_environ=xhr_header, status=200)

        result = json.loads(response.body)
        assert len(result['suggestions']) == count
@@ -0,0 +1,117 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
|
39 | ||||
|
40 | import json | |||
|
41 | ||||
|
42 | import pytest | |||
|
43 | ||||
|
44 | from rhodecode.tests import TestController | |||
|
45 | from rhodecode.tests.fixture import Fixture | |||
|
46 | ||||
|
47 | ||||
|
48 | fixture = Fixture() | |||
|
49 | ||||
|
50 | ||||
|
def route_path(name, params=None, **kwargs):
    """Resolve a test route *name* to its URL path.

    ``kwargs`` are substituted into the route pattern via ``str.format``;
    ``params``, when given, is appended as a urlencoded query string.
    """
    # py2/py3 compatibility: urlencode moved to urllib.parse in Python 3;
    # the original `urllib.urlencode` attribute only exists on Python 2.
    try:
        from urllib import urlencode
    except ImportError:
        from urllib.parse import urlencode

    base_url = {
        'user_autocomplete_data': '/_users',
        'user_group_autocomplete_data': '/_user_groups'
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urlencode(params))
    return base_url
|
62 | ||||
|
63 | ||||
|
class TestUserGroupAutocompleteData(TestController):
    """Functional tests for the ``user_group_autocomplete_data`` endpoint."""

    def test_returns_list_of_user_groups(self, user_util, xhr_header):
        # A freshly created active user group must appear in the suggestions.
        self.log_user()
        user_group = user_util.create_user_group(active=True)
        user_group_name = user_group.users_group_name
        response = self.app.get(
            route_path('user_group_autocomplete_data'),
            extra_environ=xhr_header, status=200)
        result = json.loads(response.body)
        values = [suggestion['value'] for suggestion in result['suggestions']]
        assert user_group_name in values

    def test_returns_inactive_user_groups_when_active_flag_sent(
            self, user_util, xhr_header):
        # active='0' should include inactive groups; active='1' should not.
        self.log_user()
        user_group = user_util.create_user_group(active=False)
        user_group_name = user_group.users_group_name

        response = self.app.get(
            route_path('user_group_autocomplete_data',
                       params=dict(active='0')),
            extra_environ=xhr_header, status=200)
        result = json.loads(response.body)
        values = [suggestion['value'] for suggestion in result['suggestions']]
        assert user_group_name in values

        response = self.app.get(
            route_path('user_group_autocomplete_data',
                       params=dict(active='1')),
            extra_environ=xhr_header, status=200)
        result = json.loads(response.body)
        values = [suggestion['value'] for suggestion in result['suggestions']]
        assert user_group_name not in values

    @pytest.mark.parametrize('query, count', [
        ('hello1', 0),
        ('dev', 1),
    ])
    def test_result_is_limited_when_query_is_sent(self, user_util, xhr_header, query, count):
        # Only the single 'dev-test' group matches 'dev' (count == 1);
        # an unrelated query matches nothing (count == 0).
        self.log_user()

        user_util._test_name = 'dev-test'
        user_util.create_user_group()

        response = self.app.get(
            route_path('user_group_autocomplete_data',
                       params=dict(user_groups='true',
                                   query=query)),
            extra_environ=xhr_header, status=200)

        result = json.loads(response.body)

        assert len(result['suggestions']) == count
@@ -0,0 +1,81 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | import logging | |||
|
22 | ||||
|
23 | from pyramid.view import view_config | |||
|
24 | ||||
|
25 | from rhodecode.apps._base import BaseAppView | |||
|
26 | from rhodecode.lib.auth import LoginRequired, NotAnonymous | |||
|
27 | from rhodecode.lib.utils2 import str2bool | |||
|
28 | from rhodecode.model.repo import RepoModel | |||
|
29 | ||||
|
30 | log = logging.getLogger(__name__) | |||
|
31 | ||||
|
32 | ||||
|
class HomeView(BaseAppView):
    """Pyramid views backing the user / user-group autocomplete endpoints."""

    def load_default_context(self):
        """Build and register the common template context for these views."""
        c = self._get_local_tmpl_context()
        c.user = c.auth_user.get_instance()
        self._register_global_c(c)
        return c

    @LoginRequired()
    @view_config(
        route_name='user_autocomplete_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def user_autocomplete_data(self):
        """Return user (and optionally user-group) suggestions as JSON.

        GET parameters:
          ``query``       substring filter on the name
          ``active``      '0'/'1' flag; defaults to active-only entries
          ``user_groups`` truthy value to mix user groups into the result
        """
        query = self.request.GET.get('query')
        # Absent/empty 'active' param falls back to True (active-only).
        active = str2bool(self.request.GET.get('active') or True)
        include_groups = str2bool(self.request.GET.get('user_groups'))

        log.debug('generating user list, query:%s, active:%s, with_groups:%s',
                  query, active, include_groups)

        repo_model = RepoModel()
        _users = repo_model.get_users(
            name_contains=query, only_active=active)

        if include_groups:
            # extend with user groups
            _user_groups = repo_model.get_user_groups(
                name_contains=query, only_active=active)
            _users = _users + _user_groups

        return {'suggestions': _users}

    @LoginRequired()
    @NotAnonymous()
    @view_config(
        route_name='user_group_autocomplete_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def user_group_autocomplete_data(self):
        """Return user-group suggestions as JSON (logged-in users only).

        GET parameters:
          ``query``  substring filter on the group name
          ``active`` '0'/'1' flag; defaults to active-only entries
        """
        query = self.request.GET.get('query')
        active = str2bool(self.request.GET.get('active') or True)
        log.debug('generating user group list, query:%s, active:%s',
                  query, active)

        repo_model = RepoModel()
        # NOTE: the original code contained a redundant no-op
        # `_user_groups = _user_groups` self-assignment; removed.
        _user_groups = repo_model.get_user_groups(
            name_contains=query, only_active=active)

        return {'suggestions': _user_groups}
@@ -1,512 +1,513 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Pylons middleware initialization |
|
22 | Pylons middleware initialization | |
23 | """ |
|
23 | """ | |
24 | import logging |
|
24 | import logging | |
25 | from collections import OrderedDict |
|
25 | from collections import OrderedDict | |
26 |
|
26 | |||
27 | from paste.registry import RegistryManager |
|
27 | from paste.registry import RegistryManager | |
28 | from paste.gzipper import make_gzip_middleware |
|
28 | from paste.gzipper import make_gzip_middleware | |
29 | from pylons.wsgiapp import PylonsApp |
|
29 | from pylons.wsgiapp import PylonsApp | |
30 | from pyramid.authorization import ACLAuthorizationPolicy |
|
30 | from pyramid.authorization import ACLAuthorizationPolicy | |
31 | from pyramid.config import Configurator |
|
31 | from pyramid.config import Configurator | |
32 | from pyramid.settings import asbool, aslist |
|
32 | from pyramid.settings import asbool, aslist | |
33 | from pyramid.wsgi import wsgiapp |
|
33 | from pyramid.wsgi import wsgiapp | |
34 | from pyramid.httpexceptions import ( |
|
34 | from pyramid.httpexceptions import ( | |
35 | HTTPException, HTTPError, HTTPInternalServerError, HTTPFound) |
|
35 | HTTPException, HTTPError, HTTPInternalServerError, HTTPFound) | |
36 | from pyramid.events import ApplicationCreated |
|
36 | from pyramid.events import ApplicationCreated | |
37 | from pyramid.renderers import render_to_response |
|
37 | from pyramid.renderers import render_to_response | |
38 | from routes.middleware import RoutesMiddleware |
|
38 | from routes.middleware import RoutesMiddleware | |
39 | import routes.util |
|
39 | import routes.util | |
40 |
|
40 | |||
41 | import rhodecode |
|
41 | import rhodecode | |
42 | from rhodecode.model import meta |
|
42 | from rhodecode.model import meta | |
43 | from rhodecode.config import patches |
|
43 | from rhodecode.config import patches | |
44 | from rhodecode.config.routing import STATIC_FILE_PREFIX |
|
44 | from rhodecode.config.routing import STATIC_FILE_PREFIX | |
45 | from rhodecode.config.environment import ( |
|
45 | from rhodecode.config.environment import ( | |
46 | load_environment, load_pyramid_environment) |
|
46 | load_environment, load_pyramid_environment) | |
47 | from rhodecode.lib.middleware import csrf |
|
47 | from rhodecode.lib.middleware import csrf | |
48 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled |
|
48 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled | |
49 | from rhodecode.lib.middleware.error_handling import ( |
|
49 | from rhodecode.lib.middleware.error_handling import ( | |
50 | PylonsErrorHandlingMiddleware) |
|
50 | PylonsErrorHandlingMiddleware) | |
51 | from rhodecode.lib.middleware.https_fixup import HttpsFixup |
|
51 | from rhodecode.lib.middleware.https_fixup import HttpsFixup | |
52 | from rhodecode.lib.middleware.vcs import VCSMiddleware |
|
52 | from rhodecode.lib.middleware.vcs import VCSMiddleware | |
53 | from rhodecode.lib.plugins.utils import register_rhodecode_plugin |
|
53 | from rhodecode.lib.plugins.utils import register_rhodecode_plugin | |
54 | from rhodecode.lib.utils2 import aslist as rhodecode_aslist |
|
54 | from rhodecode.lib.utils2 import aslist as rhodecode_aslist | |
55 | from rhodecode.subscribers import ( |
|
55 | from rhodecode.subscribers import ( | |
56 | scan_repositories_if_enabled, write_metadata_if_needed, |
|
56 | scan_repositories_if_enabled, write_metadata_if_needed, | |
57 | write_js_routes_if_enabled, create_largeobjects_dirs_if_needed) |
|
57 | write_js_routes_if_enabled, create_largeobjects_dirs_if_needed) | |
58 |
|
58 | |||
59 |
|
59 | |||
60 | log = logging.getLogger(__name__) |
|
60 | log = logging.getLogger(__name__) | |
61 |
|
61 | |||
62 |
|
62 | |||
# This is used to avoid the route lookup overhead in RoutesMiddleware
# for certain routes which won't go to pylons - e.g. static files, debugger.
# It is only needed for the pylons migration and can be removed once complete.
class SkippableRoutesMiddleware(RoutesMiddleware):
    """Routes middleware that allows you to skip prefixes."""

    def __init__(self, *args, **kw):
        # Pull our extra keyword out before delegating to RoutesMiddleware.
        self.skip_prefixes = kw.pop('skip_prefixes', [])
        super(SkippableRoutesMiddleware, self).__init__(*args, **kw)

    def __call__(self, environ, start_response):
        path_info = environ['PATH_INFO']
        for prefix in self.skip_prefixes:
            if not path_info.startswith(prefix):
                continue
            # Avoid the case where a missing /_static route falls through
            # to pylons and causes an exception: pylons expects
            # 'wsgiorg.routing_args' to have been set in the environ by
            # RoutesMiddleware.
            environ.setdefault('wsgiorg.routing_args', (None, {}))
            return self.app(environ, start_response)

        return super(SkippableRoutesMiddleware, self).__call__(
            environ, start_response)
86 |
|
86 | |||
87 |
|
87 | |||
def make_app(global_conf, static_files=True, **app_conf):
    """Create a Pylons WSGI application and return it

    ``global_conf``
        The inherited configuration for this application. Normally from
        the [DEFAULT] section of the Paste ini file.

    ``app_conf``
        The application's local configuration. Normally specified in
        the [app:<name>] section of the Paste ini file (where <name>
        defaults to main).

    """
    # Apply compatibility patches
    patches.kombu_1_5_1_python_2_7_11()
    patches.inspect_getargspec()

    # Configure the Pylons environment
    config = load_environment(global_conf, app_conf)

    # The Pylons WSGI app
    app = PylonsApp(config=config)
    if rhodecode.is_test:
        # Only under test: detect CSRF-token issues early.
        app = csrf.CSRFDetector(app)

    expected_origin = config.get('expected_origin')
    if expected_origin:
        # The API can be accessed from other Origins.
        app = csrf.OriginChecker(app, expected_origin,
                                 skip_urls=[routes.util.url_for('api')])

    # Establish the Registry for this application
    app = RegistryManager(app)

    # Expose the merged config on the app object so callers (e.g. the
    # pyramid wrapper in this module) can reach the Pylons settings.
    app.config = config

    return app
125 |
|
125 | |||
126 |
|
126 | |||
def make_pyramid_app(global_config, **settings):
    """
    Constructs the WSGI application based on Pyramid and wraps the Pylons based
    application.

    Specials:

    * We migrate from Pylons to Pyramid. While doing this, we keep both
      frameworks functional. This involves moving some WSGI middlewares around
      and providing access to some data internals, so that the old code is
      still functional.

    * The application can also be integrated like a plugin via the call to
      `includeme`. This is accompanied with the other utility functions which
      are called. Changing this should be done with great care to not break
      cases when these fragments are assembled from another place.

    """
    # The edition string should be available in pylons too, so we add it here
    # before copying the settings.
    settings.setdefault('rhodecode.edition', 'Community Edition')

    # As long as our Pylons application does expect "unprepared" settings, make
    # sure that we keep an unmodified copy. This avoids unintentional change of
    # behavior in the old application.
    settings_pylons = settings.copy()

    sanitize_settings_and_apply_defaults(settings)
    config = Configurator(settings=settings)
    # Stash the raw global config/settings on the registry so the Pylons
    # side (see make_not_found_view / make_app) can be built later.
    add_pylons_compat_data(config.registry, global_config, settings_pylons)

    load_pyramid_environment(global_config, settings)

    # NOTE(review): ordering appears deliberate — includeme_first before
    # includeme; confirm before reordering.
    includeme_first(config)
    includeme(config)
    pyramid_app = config.make_wsgi_app()
    pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
    pyramid_app.config = config

    # creating the app uses a connection - return it after we are done
    meta.Session.remove()

    return pyramid_app
170 |
|
170 | |||
171 |
|
171 | |||
172 | def make_not_found_view(config): |
|
172 | def make_not_found_view(config): | |
173 | """ |
|
173 | """ | |
174 | This creates the view which should be registered as not-found-view to |
|
174 | This creates the view which should be registered as not-found-view to | |
175 | pyramid. Basically it contains of the old pylons app, converted to a view. |
|
175 | pyramid. Basically it contains of the old pylons app, converted to a view. | |
176 | Additionally it is wrapped by some other middlewares. |
|
176 | Additionally it is wrapped by some other middlewares. | |
177 | """ |
|
177 | """ | |
178 | settings = config.registry.settings |
|
178 | settings = config.registry.settings | |
179 | vcs_server_enabled = settings['vcs.server.enable'] |
|
179 | vcs_server_enabled = settings['vcs.server.enable'] | |
180 |
|
180 | |||
181 | # Make pylons app from unprepared settings. |
|
181 | # Make pylons app from unprepared settings. | |
182 | pylons_app = make_app( |
|
182 | pylons_app = make_app( | |
183 | config.registry._pylons_compat_global_config, |
|
183 | config.registry._pylons_compat_global_config, | |
184 | **config.registry._pylons_compat_settings) |
|
184 | **config.registry._pylons_compat_settings) | |
185 | config.registry._pylons_compat_config = pylons_app.config |
|
185 | config.registry._pylons_compat_config = pylons_app.config | |
186 |
|
186 | |||
187 | # Appenlight monitoring. |
|
187 | # Appenlight monitoring. | |
188 | pylons_app, appenlight_client = wrap_in_appenlight_if_enabled( |
|
188 | pylons_app, appenlight_client = wrap_in_appenlight_if_enabled( | |
189 | pylons_app, settings) |
|
189 | pylons_app, settings) | |
190 |
|
190 | |||
191 | # The pylons app is executed inside of the pyramid 404 exception handler. |
|
191 | # The pylons app is executed inside of the pyramid 404 exception handler. | |
192 | # Exceptions which are raised inside of it are not handled by pyramid |
|
192 | # Exceptions which are raised inside of it are not handled by pyramid | |
193 | # again. Therefore we add a middleware that invokes the error handler in |
|
193 | # again. Therefore we add a middleware that invokes the error handler in | |
194 | # case of an exception or error response. This way we return proper error |
|
194 | # case of an exception or error response. This way we return proper error | |
195 | # HTML pages in case of an error. |
|
195 | # HTML pages in case of an error. | |
196 | reraise = (settings.get('debugtoolbar.enabled', False) or |
|
196 | reraise = (settings.get('debugtoolbar.enabled', False) or | |
197 | rhodecode.disable_error_handler) |
|
197 | rhodecode.disable_error_handler) | |
198 | pylons_app = PylonsErrorHandlingMiddleware( |
|
198 | pylons_app = PylonsErrorHandlingMiddleware( | |
199 | pylons_app, error_handler, reraise) |
|
199 | pylons_app, error_handler, reraise) | |
200 |
|
200 | |||
201 | # The VCSMiddleware shall operate like a fallback if pyramid doesn't find a |
|
201 | # The VCSMiddleware shall operate like a fallback if pyramid doesn't find a | |
202 | # view to handle the request. Therefore it is wrapped around the pylons |
|
202 | # view to handle the request. Therefore it is wrapped around the pylons | |
203 | # app. It has to be outside of the error handling otherwise error responses |
|
203 | # app. It has to be outside of the error handling otherwise error responses | |
204 | # from the vcsserver are converted to HTML error pages. This confuses the |
|
204 | # from the vcsserver are converted to HTML error pages. This confuses the | |
205 | # command line tools and the user won't get a meaningful error message. |
|
205 | # command line tools and the user won't get a meaningful error message. | |
206 | if vcs_server_enabled: |
|
206 | if vcs_server_enabled: | |
207 | pylons_app = VCSMiddleware( |
|
207 | pylons_app = VCSMiddleware( | |
208 | pylons_app, settings, appenlight_client, registry=config.registry) |
|
208 | pylons_app, settings, appenlight_client, registry=config.registry) | |
209 |
|
209 | |||
210 | # Convert WSGI app to pyramid view and return it. |
|
210 | # Convert WSGI app to pyramid view and return it. | |
211 | return wsgiapp(pylons_app) |
|
211 | return wsgiapp(pylons_app) | |
212 |
|
212 | |||
213 |
|
213 | |||
214 | def add_pylons_compat_data(registry, global_config, settings): |
|
214 | def add_pylons_compat_data(registry, global_config, settings): | |
215 | """ |
|
215 | """ | |
216 | Attach data to the registry to support the Pylons integration. |
|
216 | Attach data to the registry to support the Pylons integration. | |
217 | """ |
|
217 | """ | |
218 | registry._pylons_compat_global_config = global_config |
|
218 | registry._pylons_compat_global_config = global_config | |
219 | registry._pylons_compat_settings = settings |
|
219 | registry._pylons_compat_settings = settings | |
220 |
|
220 | |||
221 |
|
221 | |||
222 | def error_handler(exception, request): |
|
222 | def error_handler(exception, request): | |
223 | import rhodecode |
|
223 | import rhodecode | |
224 | from rhodecode.lib.utils2 import AttributeDict |
|
224 | from rhodecode.lib.utils2 import AttributeDict | |
225 |
|
225 | |||
226 | rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode' |
|
226 | rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode' | |
227 |
|
227 | |||
228 | base_response = HTTPInternalServerError() |
|
228 | base_response = HTTPInternalServerError() | |
229 | # prefer original exception for the response since it may have headers set |
|
229 | # prefer original exception for the response since it may have headers set | |
230 | if isinstance(exception, HTTPException): |
|
230 | if isinstance(exception, HTTPException): | |
231 | base_response = exception |
|
231 | base_response = exception | |
232 |
|
232 | |||
233 | def is_http_error(response): |
|
233 | def is_http_error(response): | |
234 | # error which should have traceback |
|
234 | # error which should have traceback | |
235 | return response.status_code > 499 |
|
235 | return response.status_code > 499 | |
236 |
|
236 | |||
237 | if is_http_error(base_response): |
|
237 | if is_http_error(base_response): | |
238 | log.exception( |
|
238 | log.exception( | |
239 | 'error occurred handling this request for path: %s', request.path) |
|
239 | 'error occurred handling this request for path: %s', request.path) | |
240 |
|
240 | |||
241 | c = AttributeDict() |
|
241 | c = AttributeDict() | |
242 | c.error_message = base_response.status |
|
242 | c.error_message = base_response.status | |
243 | c.error_explanation = base_response.explanation or str(base_response) |
|
243 | c.error_explanation = base_response.explanation or str(base_response) | |
244 | c.visual = AttributeDict() |
|
244 | c.visual = AttributeDict() | |
245 |
|
245 | |||
246 | c.visual.rhodecode_support_url = ( |
|
246 | c.visual.rhodecode_support_url = ( | |
247 | request.registry.settings.get('rhodecode_support_url') or |
|
247 | request.registry.settings.get('rhodecode_support_url') or | |
248 | request.route_url('rhodecode_support') |
|
248 | request.route_url('rhodecode_support') | |
249 | ) |
|
249 | ) | |
250 | c.redirect_time = 0 |
|
250 | c.redirect_time = 0 | |
251 | c.rhodecode_name = rhodecode_title |
|
251 | c.rhodecode_name = rhodecode_title | |
252 | if not c.rhodecode_name: |
|
252 | if not c.rhodecode_name: | |
253 | c.rhodecode_name = 'Rhodecode' |
|
253 | c.rhodecode_name = 'Rhodecode' | |
254 |
|
254 | |||
255 | c.causes = [] |
|
255 | c.causes = [] | |
256 | if hasattr(base_response, 'causes'): |
|
256 | if hasattr(base_response, 'causes'): | |
257 | c.causes = base_response.causes |
|
257 | c.causes = base_response.causes | |
258 |
|
258 | |||
259 | response = render_to_response( |
|
259 | response = render_to_response( | |
260 | '/errors/error_document.mako', {'c': c}, request=request, |
|
260 | '/errors/error_document.mako', {'c': c}, request=request, | |
261 | response=base_response) |
|
261 | response=base_response) | |
262 |
|
262 | |||
263 | return response |
|
263 | return response | |
264 |
|
264 | |||
265 |
|
265 | |||
266 | def includeme(config): |
|
266 | def includeme(config): | |
267 | settings = config.registry.settings |
|
267 | settings = config.registry.settings | |
268 |
|
268 | |||
269 | # plugin information |
|
269 | # plugin information | |
270 | config.registry.rhodecode_plugins = OrderedDict() |
|
270 | config.registry.rhodecode_plugins = OrderedDict() | |
271 |
|
271 | |||
272 | config.add_directive( |
|
272 | config.add_directive( | |
273 | 'register_rhodecode_plugin', register_rhodecode_plugin) |
|
273 | 'register_rhodecode_plugin', register_rhodecode_plugin) | |
274 |
|
274 | |||
275 | if asbool(settings.get('appenlight', 'false')): |
|
275 | if asbool(settings.get('appenlight', 'false')): | |
276 | config.include('appenlight_client.ext.pyramid_tween') |
|
276 | config.include('appenlight_client.ext.pyramid_tween') | |
277 |
|
277 | |||
278 | # Includes which are required. The application would fail without them. |
|
278 | # Includes which are required. The application would fail without them. | |
279 | config.include('pyramid_mako') |
|
279 | config.include('pyramid_mako') | |
280 | config.include('pyramid_beaker') |
|
280 | config.include('pyramid_beaker') | |
281 |
|
281 | |||
282 | config.include('rhodecode.authentication') |
|
282 | config.include('rhodecode.authentication') | |
283 | config.include('rhodecode.integrations') |
|
283 | config.include('rhodecode.integrations') | |
284 |
|
284 | |||
285 | # apps |
|
285 | # apps | |
286 | config.include('rhodecode.apps._base') |
|
286 | config.include('rhodecode.apps._base') | |
287 |
|
287 | |||
288 | config.include('rhodecode.apps.admin') |
|
288 | config.include('rhodecode.apps.admin') | |
289 | config.include('rhodecode.apps.channelstream') |
|
289 | config.include('rhodecode.apps.channelstream') | |
290 | config.include('rhodecode.apps.login') |
|
290 | config.include('rhodecode.apps.login') | |
|
291 | config.include('rhodecode.apps.home') | |||
291 | config.include('rhodecode.apps.repository') |
|
292 | config.include('rhodecode.apps.repository') | |
292 | config.include('rhodecode.apps.user_profile') |
|
293 | config.include('rhodecode.apps.user_profile') | |
293 | config.include('rhodecode.apps.my_account') |
|
294 | config.include('rhodecode.apps.my_account') | |
294 | config.include('rhodecode.apps.svn_support') |
|
295 | config.include('rhodecode.apps.svn_support') | |
295 |
|
296 | |||
296 | config.include('rhodecode.tweens') |
|
297 | config.include('rhodecode.tweens') | |
297 | config.include('rhodecode.api') |
|
298 | config.include('rhodecode.api') | |
298 |
|
299 | |||
299 | config.add_route( |
|
300 | config.add_route( | |
300 | 'rhodecode_support', 'https://rhodecode.com/help/', static=True) |
|
301 | 'rhodecode_support', 'https://rhodecode.com/help/', static=True) | |
301 |
|
302 | |||
302 | config.add_translation_dirs('rhodecode:i18n/') |
|
303 | config.add_translation_dirs('rhodecode:i18n/') | |
303 | settings['default_locale_name'] = settings.get('lang', 'en') |
|
304 | settings['default_locale_name'] = settings.get('lang', 'en') | |
304 |
|
305 | |||
305 | # Add subscribers. |
|
306 | # Add subscribers. | |
306 | config.add_subscriber(create_largeobjects_dirs_if_needed, ApplicationCreated) |
|
307 | config.add_subscriber(create_largeobjects_dirs_if_needed, ApplicationCreated) | |
307 | config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated) |
|
308 | config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated) | |
308 | config.add_subscriber(write_metadata_if_needed, ApplicationCreated) |
|
309 | config.add_subscriber(write_metadata_if_needed, ApplicationCreated) | |
309 | config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated) |
|
310 | config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated) | |
310 |
|
311 | |||
311 | # Set the authorization policy. |
|
312 | # Set the authorization policy. | |
312 | authz_policy = ACLAuthorizationPolicy() |
|
313 | authz_policy = ACLAuthorizationPolicy() | |
313 | config.set_authorization_policy(authz_policy) |
|
314 | config.set_authorization_policy(authz_policy) | |
314 |
|
315 | |||
315 | # Set the default renderer for HTML templates to mako. |
|
316 | # Set the default renderer for HTML templates to mako. | |
316 | config.add_mako_renderer('.html') |
|
317 | config.add_mako_renderer('.html') | |
317 |
|
318 | |||
318 | config.add_renderer( |
|
319 | config.add_renderer( | |
319 | name='json_ext', |
|
320 | name='json_ext', | |
320 | factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json') |
|
321 | factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json') | |
321 |
|
322 | |||
322 | # include RhodeCode plugins |
|
323 | # include RhodeCode plugins | |
323 | includes = aslist(settings.get('rhodecode.includes', [])) |
|
324 | includes = aslist(settings.get('rhodecode.includes', [])) | |
324 | for inc in includes: |
|
325 | for inc in includes: | |
325 | config.include(inc) |
|
326 | config.include(inc) | |
326 |
|
327 | |||
327 | # This is the glue which allows us to migrate in chunks. By registering the |
|
328 | # This is the glue which allows us to migrate in chunks. By registering the | |
328 | # pylons based application as the "Not Found" view in Pyramid, we will |
|
329 | # pylons based application as the "Not Found" view in Pyramid, we will | |
329 | # fallback to the old application each time the new one does not yet know |
|
330 | # fallback to the old application each time the new one does not yet know | |
330 | # how to handle a request. |
|
331 | # how to handle a request. | |
331 | config.add_notfound_view(make_not_found_view(config)) |
|
332 | config.add_notfound_view(make_not_found_view(config)) | |
332 |
|
333 | |||
333 | if not settings.get('debugtoolbar.enabled', False): |
|
334 | if not settings.get('debugtoolbar.enabled', False): | |
334 | # if no toolbar, then any exception gets caught and rendered |
|
335 | # if no toolbar, then any exception gets caught and rendered | |
335 | config.add_view(error_handler, context=Exception) |
|
336 | config.add_view(error_handler, context=Exception) | |
336 |
|
337 | |||
337 | config.add_view(error_handler, context=HTTPError) |
|
338 | config.add_view(error_handler, context=HTTPError) | |
338 |
|
339 | |||
339 |
|
340 | |||
340 | def includeme_first(config): |
|
341 | def includeme_first(config): | |
341 | # redirect automatic browser favicon.ico requests to correct place |
|
342 | # redirect automatic browser favicon.ico requests to correct place | |
342 | def favicon_redirect(context, request): |
|
343 | def favicon_redirect(context, request): | |
343 | return HTTPFound( |
|
344 | return HTTPFound( | |
344 | request.static_path('rhodecode:public/images/favicon.ico')) |
|
345 | request.static_path('rhodecode:public/images/favicon.ico')) | |
345 |
|
346 | |||
346 | config.add_view(favicon_redirect, route_name='favicon') |
|
347 | config.add_view(favicon_redirect, route_name='favicon') | |
347 | config.add_route('favicon', '/favicon.ico') |
|
348 | config.add_route('favicon', '/favicon.ico') | |
348 |
|
349 | |||
349 | def robots_redirect(context, request): |
|
350 | def robots_redirect(context, request): | |
350 | return HTTPFound( |
|
351 | return HTTPFound( | |
351 | request.static_path('rhodecode:public/robots.txt')) |
|
352 | request.static_path('rhodecode:public/robots.txt')) | |
352 |
|
353 | |||
353 | config.add_view(robots_redirect, route_name='robots') |
|
354 | config.add_view(robots_redirect, route_name='robots') | |
354 | config.add_route('robots', '/robots.txt') |
|
355 | config.add_route('robots', '/robots.txt') | |
355 |
|
356 | |||
356 | config.add_static_view( |
|
357 | config.add_static_view( | |
357 | '_static/deform', 'deform:static') |
|
358 | '_static/deform', 'deform:static') | |
358 | config.add_static_view( |
|
359 | config.add_static_view( | |
359 | '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24) |
|
360 | '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24) | |
360 |
|
361 | |||
361 |
|
362 | |||
362 | def wrap_app_in_wsgi_middlewares(pyramid_app, config): |
|
363 | def wrap_app_in_wsgi_middlewares(pyramid_app, config): | |
363 | """ |
|
364 | """ | |
364 | Apply outer WSGI middlewares around the application. |
|
365 | Apply outer WSGI middlewares around the application. | |
365 |
|
366 | |||
366 | Part of this has been moved up from the Pylons layer, so that the |
|
367 | Part of this has been moved up from the Pylons layer, so that the | |
367 | data is also available if old Pylons code is hit through an already ported |
|
368 | data is also available if old Pylons code is hit through an already ported | |
368 | view. |
|
369 | view. | |
369 | """ |
|
370 | """ | |
370 | settings = config.registry.settings |
|
371 | settings = config.registry.settings | |
371 |
|
372 | |||
372 | # enable https redirects based on HTTP_X_URL_SCHEME set by proxy |
|
373 | # enable https redirects based on HTTP_X_URL_SCHEME set by proxy | |
373 | pyramid_app = HttpsFixup(pyramid_app, settings) |
|
374 | pyramid_app = HttpsFixup(pyramid_app, settings) | |
374 |
|
375 | |||
375 | # Add RoutesMiddleware to support the pylons compatibility tween during |
|
376 | # Add RoutesMiddleware to support the pylons compatibility tween during | |
376 | # migration to pyramid. |
|
377 | # migration to pyramid. | |
377 | pyramid_app = SkippableRoutesMiddleware( |
|
378 | pyramid_app = SkippableRoutesMiddleware( | |
378 | pyramid_app, config.registry._pylons_compat_config['routes.map'], |
|
379 | pyramid_app, config.registry._pylons_compat_config['routes.map'], | |
379 | skip_prefixes=(STATIC_FILE_PREFIX, '/_debug_toolbar')) |
|
380 | skip_prefixes=(STATIC_FILE_PREFIX, '/_debug_toolbar')) | |
380 |
|
381 | |||
381 | pyramid_app, _ = wrap_in_appenlight_if_enabled(pyramid_app, settings) |
|
382 | pyramid_app, _ = wrap_in_appenlight_if_enabled(pyramid_app, settings) | |
382 |
|
383 | |||
383 | if settings['gzip_responses']: |
|
384 | if settings['gzip_responses']: | |
384 | pyramid_app = make_gzip_middleware( |
|
385 | pyramid_app = make_gzip_middleware( | |
385 | pyramid_app, settings, compress_level=1) |
|
386 | pyramid_app, settings, compress_level=1) | |
386 |
|
387 | |||
387 | # this should be the outer most middleware in the wsgi stack since |
|
388 | # this should be the outer most middleware in the wsgi stack since | |
388 | # middleware like Routes make database calls |
|
389 | # middleware like Routes make database calls | |
389 | def pyramid_app_with_cleanup(environ, start_response): |
|
390 | def pyramid_app_with_cleanup(environ, start_response): | |
390 | try: |
|
391 | try: | |
391 | return pyramid_app(environ, start_response) |
|
392 | return pyramid_app(environ, start_response) | |
392 | finally: |
|
393 | finally: | |
393 | # Dispose current database session and rollback uncommitted |
|
394 | # Dispose current database session and rollback uncommitted | |
394 | # transactions. |
|
395 | # transactions. | |
395 | meta.Session.remove() |
|
396 | meta.Session.remove() | |
396 |
|
397 | |||
397 | # In a single threaded mode server, on non sqlite db we should have |
|
398 | # In a single threaded mode server, on non sqlite db we should have | |
398 | # '0 Current Checked out connections' at the end of a request, |
|
399 | # '0 Current Checked out connections' at the end of a request, | |
399 | # if not, then something, somewhere is leaving a connection open |
|
400 | # if not, then something, somewhere is leaving a connection open | |
400 | pool = meta.Base.metadata.bind.engine.pool |
|
401 | pool = meta.Base.metadata.bind.engine.pool | |
401 | log.debug('sa pool status: %s', pool.status()) |
|
402 | log.debug('sa pool status: %s', pool.status()) | |
402 |
|
403 | |||
403 |
|
404 | |||
404 | return pyramid_app_with_cleanup |
|
405 | return pyramid_app_with_cleanup | |
405 |
|
406 | |||
406 |
|
407 | |||
407 | def sanitize_settings_and_apply_defaults(settings): |
|
408 | def sanitize_settings_and_apply_defaults(settings): | |
408 | """ |
|
409 | """ | |
409 | Applies settings defaults and does all type conversion. |
|
410 | Applies settings defaults and does all type conversion. | |
410 |
|
411 | |||
411 | We would move all settings parsing and preparation into this place, so that |
|
412 | We would move all settings parsing and preparation into this place, so that | |
412 | we have only one place left which deals with this part. The remaining parts |
|
413 | we have only one place left which deals with this part. The remaining parts | |
413 | of the application would start to rely fully on well prepared settings. |
|
414 | of the application would start to rely fully on well prepared settings. | |
414 |
|
415 | |||
415 | This piece would later be split up per topic to avoid a big fat monster |
|
416 | This piece would later be split up per topic to avoid a big fat monster | |
416 | function. |
|
417 | function. | |
417 | """ |
|
418 | """ | |
418 |
|
419 | |||
419 | # Pyramid's mako renderer has to search in the templates folder so that the |
|
420 | # Pyramid's mako renderer has to search in the templates folder so that the | |
420 | # old templates still work. Ported and new templates are expected to use |
|
421 | # old templates still work. Ported and new templates are expected to use | |
421 | # real asset specifications for the includes. |
|
422 | # real asset specifications for the includes. | |
422 | mako_directories = settings.setdefault('mako.directories', [ |
|
423 | mako_directories = settings.setdefault('mako.directories', [ | |
423 | # Base templates of the original Pylons application |
|
424 | # Base templates of the original Pylons application | |
424 | 'rhodecode:templates', |
|
425 | 'rhodecode:templates', | |
425 | ]) |
|
426 | ]) | |
426 | log.debug( |
|
427 | log.debug( | |
427 | "Using the following Mako template directories: %s", |
|
428 | "Using the following Mako template directories: %s", | |
428 | mako_directories) |
|
429 | mako_directories) | |
429 |
|
430 | |||
430 | # Default includes, possible to change as a user |
|
431 | # Default includes, possible to change as a user | |
431 | pyramid_includes = settings.setdefault('pyramid.includes', [ |
|
432 | pyramid_includes = settings.setdefault('pyramid.includes', [ | |
432 | 'rhodecode.lib.middleware.request_wrapper', |
|
433 | 'rhodecode.lib.middleware.request_wrapper', | |
433 | ]) |
|
434 | ]) | |
434 | log.debug( |
|
435 | log.debug( | |
435 | "Using the following pyramid.includes: %s", |
|
436 | "Using the following pyramid.includes: %s", | |
436 | pyramid_includes) |
|
437 | pyramid_includes) | |
437 |
|
438 | |||
438 | # TODO: johbo: Re-think this, usually the call to config.include |
|
439 | # TODO: johbo: Re-think this, usually the call to config.include | |
439 | # should allow to pass in a prefix. |
|
440 | # should allow to pass in a prefix. | |
440 | settings.setdefault('rhodecode.api.url', '/_admin/api') |
|
441 | settings.setdefault('rhodecode.api.url', '/_admin/api') | |
441 |
|
442 | |||
442 | # Sanitize generic settings. |
|
443 | # Sanitize generic settings. | |
443 | _list_setting(settings, 'default_encoding', 'UTF-8') |
|
444 | _list_setting(settings, 'default_encoding', 'UTF-8') | |
444 | _bool_setting(settings, 'is_test', 'false') |
|
445 | _bool_setting(settings, 'is_test', 'false') | |
445 | _bool_setting(settings, 'gzip_responses', 'false') |
|
446 | _bool_setting(settings, 'gzip_responses', 'false') | |
446 |
|
447 | |||
447 | # Call split out functions that sanitize settings for each topic. |
|
448 | # Call split out functions that sanitize settings for each topic. | |
448 | _sanitize_appenlight_settings(settings) |
|
449 | _sanitize_appenlight_settings(settings) | |
449 | _sanitize_vcs_settings(settings) |
|
450 | _sanitize_vcs_settings(settings) | |
450 |
|
451 | |||
451 | return settings |
|
452 | return settings | |
452 |
|
453 | |||
453 |
|
454 | |||
454 | def _sanitize_appenlight_settings(settings): |
|
455 | def _sanitize_appenlight_settings(settings): | |
455 | _bool_setting(settings, 'appenlight', 'false') |
|
456 | _bool_setting(settings, 'appenlight', 'false') | |
456 |
|
457 | |||
457 |
|
458 | |||
458 | def _sanitize_vcs_settings(settings): |
|
459 | def _sanitize_vcs_settings(settings): | |
459 | """ |
|
460 | """ | |
460 | Applies settings defaults and does type conversion for all VCS related |
|
461 | Applies settings defaults and does type conversion for all VCS related | |
461 | settings. |
|
462 | settings. | |
462 | """ |
|
463 | """ | |
463 | _string_setting(settings, 'vcs.svn.compatible_version', '') |
|
464 | _string_setting(settings, 'vcs.svn.compatible_version', '') | |
464 | _string_setting(settings, 'git_rev_filter', '--all') |
|
465 | _string_setting(settings, 'git_rev_filter', '--all') | |
465 | _string_setting(settings, 'vcs.hooks.protocol', 'http') |
|
466 | _string_setting(settings, 'vcs.hooks.protocol', 'http') | |
466 | _string_setting(settings, 'vcs.scm_app_implementation', 'http') |
|
467 | _string_setting(settings, 'vcs.scm_app_implementation', 'http') | |
467 | _string_setting(settings, 'vcs.server', '') |
|
468 | _string_setting(settings, 'vcs.server', '') | |
468 | _string_setting(settings, 'vcs.server.log_level', 'debug') |
|
469 | _string_setting(settings, 'vcs.server.log_level', 'debug') | |
469 | _string_setting(settings, 'vcs.server.protocol', 'http') |
|
470 | _string_setting(settings, 'vcs.server.protocol', 'http') | |
470 | _bool_setting(settings, 'startup.import_repos', 'false') |
|
471 | _bool_setting(settings, 'startup.import_repos', 'false') | |
471 | _bool_setting(settings, 'vcs.hooks.direct_calls', 'false') |
|
472 | _bool_setting(settings, 'vcs.hooks.direct_calls', 'false') | |
472 | _bool_setting(settings, 'vcs.server.enable', 'true') |
|
473 | _bool_setting(settings, 'vcs.server.enable', 'true') | |
473 | _bool_setting(settings, 'vcs.start_server', 'false') |
|
474 | _bool_setting(settings, 'vcs.start_server', 'false') | |
474 | _list_setting(settings, 'vcs.backends', 'hg, git, svn') |
|
475 | _list_setting(settings, 'vcs.backends', 'hg, git, svn') | |
475 | _int_setting(settings, 'vcs.connection_timeout', 3600) |
|
476 | _int_setting(settings, 'vcs.connection_timeout', 3600) | |
476 |
|
477 | |||
477 | # Support legacy values of vcs.scm_app_implementation. Legacy |
|
478 | # Support legacy values of vcs.scm_app_implementation. Legacy | |
478 | # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http' |
|
479 | # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http' | |
479 | # which is now mapped to 'http'. |
|
480 | # which is now mapped to 'http'. | |
480 | scm_app_impl = settings['vcs.scm_app_implementation'] |
|
481 | scm_app_impl = settings['vcs.scm_app_implementation'] | |
481 | if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http': |
|
482 | if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http': | |
482 | settings['vcs.scm_app_implementation'] = 'http' |
|
483 | settings['vcs.scm_app_implementation'] = 'http' | |
483 |
|
484 | |||
484 |
|
485 | |||
485 | def _int_setting(settings, name, default): |
|
486 | def _int_setting(settings, name, default): | |
486 | settings[name] = int(settings.get(name, default)) |
|
487 | settings[name] = int(settings.get(name, default)) | |
487 |
|
488 | |||
488 |
|
489 | |||
489 | def _bool_setting(settings, name, default): |
|
490 | def _bool_setting(settings, name, default): | |
490 | input = settings.get(name, default) |
|
491 | input = settings.get(name, default) | |
491 | if isinstance(input, unicode): |
|
492 | if isinstance(input, unicode): | |
492 | input = input.encode('utf8') |
|
493 | input = input.encode('utf8') | |
493 | settings[name] = asbool(input) |
|
494 | settings[name] = asbool(input) | |
494 |
|
495 | |||
495 |
|
496 | |||
496 | def _list_setting(settings, name, default): |
|
497 | def _list_setting(settings, name, default): | |
497 | raw_value = settings.get(name, default) |
|
498 | raw_value = settings.get(name, default) | |
498 |
|
499 | |||
499 | old_separator = ',' |
|
500 | old_separator = ',' | |
500 | if old_separator in raw_value: |
|
501 | if old_separator in raw_value: | |
501 | # If we get a comma separated list, pass it to our own function. |
|
502 | # If we get a comma separated list, pass it to our own function. | |
502 | settings[name] = rhodecode_aslist(raw_value, sep=old_separator) |
|
503 | settings[name] = rhodecode_aslist(raw_value, sep=old_separator) | |
503 | else: |
|
504 | else: | |
504 | # Otherwise we assume it uses pyramids space/newline separation. |
|
505 | # Otherwise we assume it uses pyramids space/newline separation. | |
505 | settings[name] = aslist(raw_value) |
|
506 | settings[name] = aslist(raw_value) | |
506 |
|
507 | |||
507 |
|
508 | |||
508 | def _string_setting(settings, name, default, lower=True): |
|
509 | def _string_setting(settings, name, default, lower=True): | |
509 | value = settings.get(name, default) |
|
510 | value = settings.get(name, default) | |
510 | if lower: |
|
511 | if lower: | |
511 | value = value.lower() |
|
512 | value = value.lower() | |
512 | settings[name] = value |
|
513 | settings[name] = value |
@@ -1,1153 +1,1148 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Routes configuration |
|
22 | Routes configuration | |
23 |
|
23 | |||
24 | The more specific and detailed routes should be defined first so they |
|
24 | The more specific and detailed routes should be defined first so they | |
25 | may take precedent over the more generic routes. For more information |
|
25 | may take precedent over the more generic routes. For more information | |
26 | refer to the routes manual at http://routes.groovie.org/docs/ |
|
26 | refer to the routes manual at http://routes.groovie.org/docs/ | |
27 |
|
27 | |||
28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py |
|
28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py | |
29 | and _route_name variable which uses some of stored naming here to do redirects. |
|
29 | and _route_name variable which uses some of stored naming here to do redirects. | |
30 | """ |
|
30 | """ | |
31 | import os |
|
31 | import os | |
32 | import re |
|
32 | import re | |
33 | from routes import Mapper |
|
33 | from routes import Mapper | |
34 |
|
34 | |||
35 | from rhodecode.config import routing_links |
|
35 | from rhodecode.config import routing_links | |
36 |
|
36 | |||
37 | # prefix for non repository related links needs to be prefixed with `/` |
|
37 | # prefix for non repository related links needs to be prefixed with `/` | |
38 | ADMIN_PREFIX = '/_admin' |
|
38 | ADMIN_PREFIX = '/_admin' | |
39 | STATIC_FILE_PREFIX = '/_static' |
|
39 | STATIC_FILE_PREFIX = '/_static' | |
40 |
|
40 | |||
41 | # Default requirements for URL parts |
|
41 | # Default requirements for URL parts | |
42 | URL_NAME_REQUIREMENTS = { |
|
42 | URL_NAME_REQUIREMENTS = { | |
43 | # group name can have a slash in them, but they must not end with a slash |
|
43 | # group name can have a slash in them, but they must not end with a slash | |
44 | 'group_name': r'.*?[^/]', |
|
44 | 'group_name': r'.*?[^/]', | |
45 | 'repo_group_name': r'.*?[^/]', |
|
45 | 'repo_group_name': r'.*?[^/]', | |
46 | # repo names can have a slash in them, but they must not end with a slash |
|
46 | # repo names can have a slash in them, but they must not end with a slash | |
47 | 'repo_name': r'.*?[^/]', |
|
47 | 'repo_name': r'.*?[^/]', | |
48 | # file path eats up everything at the end |
|
48 | # file path eats up everything at the end | |
49 | 'f_path': r'.*', |
|
49 | 'f_path': r'.*', | |
50 | # reference types |
|
50 | # reference types | |
51 | 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)', |
|
51 | 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)', | |
52 | 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)', |
|
52 | 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)', | |
53 | } |
|
53 | } | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | def add_route_requirements(route_path, requirements): |
|
56 | def add_route_requirements(route_path, requirements): | |
57 | """ |
|
57 | """ | |
58 | Adds regex requirements to pyramid routes using a mapping dict |
|
58 | Adds regex requirements to pyramid routes using a mapping dict | |
59 |
|
59 | |||
60 | >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'}) |
|
60 | >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'}) | |
61 | '/{action}/{id:\d+}' |
|
61 | '/{action}/{id:\d+}' | |
62 |
|
62 | |||
63 | """ |
|
63 | """ | |
64 | for key, regex in requirements.items(): |
|
64 | for key, regex in requirements.items(): | |
65 | route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex)) |
|
65 | route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex)) | |
66 | return route_path |
|
66 | return route_path | |
67 |
|
67 | |||
68 |
|
68 | |||
69 | class JSRoutesMapper(Mapper): |
|
69 | class JSRoutesMapper(Mapper): | |
70 | """ |
|
70 | """ | |
71 | Wrapper for routes.Mapper to make pyroutes compatible url definitions |
|
71 | Wrapper for routes.Mapper to make pyroutes compatible url definitions | |
72 | """ |
|
72 | """ | |
73 | _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$') |
|
73 | _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$') | |
74 | _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') |
|
74 | _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') | |
75 | def __init__(self, *args, **kw): |
|
75 | def __init__(self, *args, **kw): | |
76 | super(JSRoutesMapper, self).__init__(*args, **kw) |
|
76 | super(JSRoutesMapper, self).__init__(*args, **kw) | |
77 | self._jsroutes = [] |
|
77 | self._jsroutes = [] | |
78 |
|
78 | |||
79 | def connect(self, *args, **kw): |
|
79 | def connect(self, *args, **kw): | |
80 | """ |
|
80 | """ | |
81 | Wrapper for connect to take an extra argument jsroute=True |
|
81 | Wrapper for connect to take an extra argument jsroute=True | |
82 |
|
82 | |||
83 | :param jsroute: boolean, if True will add the route to the pyroutes list |
|
83 | :param jsroute: boolean, if True will add the route to the pyroutes list | |
84 | """ |
|
84 | """ | |
85 | if kw.pop('jsroute', False): |
|
85 | if kw.pop('jsroute', False): | |
86 | if not self._named_route_regex.match(args[0]): |
|
86 | if not self._named_route_regex.match(args[0]): | |
87 | raise Exception('only named routes can be added to pyroutes') |
|
87 | raise Exception('only named routes can be added to pyroutes') | |
88 | self._jsroutes.append(args[0]) |
|
88 | self._jsroutes.append(args[0]) | |
89 |
|
89 | |||
90 | super(JSRoutesMapper, self).connect(*args, **kw) |
|
90 | super(JSRoutesMapper, self).connect(*args, **kw) | |
91 |
|
91 | |||
92 | def _extract_route_information(self, route): |
|
92 | def _extract_route_information(self, route): | |
93 | """ |
|
93 | """ | |
94 | Convert a route into tuple(name, path, args), eg: |
|
94 | Convert a route into tuple(name, path, args), eg: | |
95 | ('show_user', '/profile/%(username)s', ['username']) |
|
95 | ('show_user', '/profile/%(username)s', ['username']) | |
96 | """ |
|
96 | """ | |
97 | routepath = route.routepath |
|
97 | routepath = route.routepath | |
98 | def replace(matchobj): |
|
98 | def replace(matchobj): | |
99 | if matchobj.group(1): |
|
99 | if matchobj.group(1): | |
100 | return "%%(%s)s" % matchobj.group(1).split(':')[0] |
|
100 | return "%%(%s)s" % matchobj.group(1).split(':')[0] | |
101 | else: |
|
101 | else: | |
102 | return "%%(%s)s" % matchobj.group(2) |
|
102 | return "%%(%s)s" % matchobj.group(2) | |
103 |
|
103 | |||
104 | routepath = self._argument_prog.sub(replace, routepath) |
|
104 | routepath = self._argument_prog.sub(replace, routepath) | |
105 | return ( |
|
105 | return ( | |
106 | route.name, |
|
106 | route.name, | |
107 | routepath, |
|
107 | routepath, | |
108 | [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) |
|
108 | [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) | |
109 | for arg in self._argument_prog.findall(route.routepath)] |
|
109 | for arg in self._argument_prog.findall(route.routepath)] | |
110 | ) |
|
110 | ) | |
111 |
|
111 | |||
112 | def jsroutes(self): |
|
112 | def jsroutes(self): | |
113 | """ |
|
113 | """ | |
114 | Return a list of pyroutes.js compatible routes |
|
114 | Return a list of pyroutes.js compatible routes | |
115 | """ |
|
115 | """ | |
116 | for route_name in self._jsroutes: |
|
116 | for route_name in self._jsroutes: | |
117 | yield self._extract_route_information(self._routenames[route_name]) |
|
117 | yield self._extract_route_information(self._routenames[route_name]) | |
118 |
|
118 | |||
119 |
|
119 | |||
120 | def make_map(config): |
|
120 | def make_map(config): | |
121 | """Create, configure and return the routes Mapper""" |
|
121 | """Create, configure and return the routes Mapper""" | |
122 | rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'], |
|
122 | rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'], | |
123 | always_scan=config['debug']) |
|
123 | always_scan=config['debug']) | |
124 | rmap.minimization = False |
|
124 | rmap.minimization = False | |
125 | rmap.explicit = False |
|
125 | rmap.explicit = False | |
126 |
|
126 | |||
127 | from rhodecode.lib.utils2 import str2bool |
|
127 | from rhodecode.lib.utils2 import str2bool | |
128 | from rhodecode.model import repo, repo_group |
|
128 | from rhodecode.model import repo, repo_group | |
129 |
|
129 | |||
130 | def check_repo(environ, match_dict): |
|
130 | def check_repo(environ, match_dict): | |
131 | """ |
|
131 | """ | |
132 | check for valid repository for proper 404 handling |
|
132 | check for valid repository for proper 404 handling | |
133 |
|
133 | |||
134 | :param environ: |
|
134 | :param environ: | |
135 | :param match_dict: |
|
135 | :param match_dict: | |
136 | """ |
|
136 | """ | |
137 | repo_name = match_dict.get('repo_name') |
|
137 | repo_name = match_dict.get('repo_name') | |
138 |
|
138 | |||
139 | if match_dict.get('f_path'): |
|
139 | if match_dict.get('f_path'): | |
140 | # fix for multiple initial slashes that causes errors |
|
140 | # fix for multiple initial slashes that causes errors | |
141 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') |
|
141 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') | |
142 | repo_model = repo.RepoModel() |
|
142 | repo_model = repo.RepoModel() | |
143 | by_name_match = repo_model.get_by_repo_name(repo_name) |
|
143 | by_name_match = repo_model.get_by_repo_name(repo_name) | |
144 | # if we match quickly from database, short circuit the operation, |
|
144 | # if we match quickly from database, short circuit the operation, | |
145 | # and validate repo based on the type. |
|
145 | # and validate repo based on the type. | |
146 | if by_name_match: |
|
146 | if by_name_match: | |
147 | return True |
|
147 | return True | |
148 |
|
148 | |||
149 | by_id_match = repo_model.get_repo_by_id(repo_name) |
|
149 | by_id_match = repo_model.get_repo_by_id(repo_name) | |
150 | if by_id_match: |
|
150 | if by_id_match: | |
151 | repo_name = by_id_match.repo_name |
|
151 | repo_name = by_id_match.repo_name | |
152 | match_dict['repo_name'] = repo_name |
|
152 | match_dict['repo_name'] = repo_name | |
153 | return True |
|
153 | return True | |
154 |
|
154 | |||
155 | return False |
|
155 | return False | |
156 |
|
156 | |||
157 | def check_group(environ, match_dict): |
|
157 | def check_group(environ, match_dict): | |
158 | """ |
|
158 | """ | |
159 | check for valid repository group path for proper 404 handling |
|
159 | check for valid repository group path for proper 404 handling | |
160 |
|
160 | |||
161 | :param environ: |
|
161 | :param environ: | |
162 | :param match_dict: |
|
162 | :param match_dict: | |
163 | """ |
|
163 | """ | |
164 | repo_group_name = match_dict.get('group_name') |
|
164 | repo_group_name = match_dict.get('group_name') | |
165 | repo_group_model = repo_group.RepoGroupModel() |
|
165 | repo_group_model = repo_group.RepoGroupModel() | |
166 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) |
|
166 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) | |
167 | if by_name_match: |
|
167 | if by_name_match: | |
168 | return True |
|
168 | return True | |
169 |
|
169 | |||
170 | return False |
|
170 | return False | |
171 |
|
171 | |||
172 | def check_user_group(environ, match_dict): |
|
172 | def check_user_group(environ, match_dict): | |
173 | """ |
|
173 | """ | |
174 | check for valid user group for proper 404 handling |
|
174 | check for valid user group for proper 404 handling | |
175 |
|
175 | |||
176 | :param environ: |
|
176 | :param environ: | |
177 | :param match_dict: |
|
177 | :param match_dict: | |
178 | """ |
|
178 | """ | |
179 | return True |
|
179 | return True | |
180 |
|
180 | |||
181 | def check_int(environ, match_dict): |
|
181 | def check_int(environ, match_dict): | |
182 | return match_dict.get('id').isdigit() |
|
182 | return match_dict.get('id').isdigit() | |
183 |
|
183 | |||
184 |
|
184 | |||
185 | #========================================================================== |
|
185 | #========================================================================== | |
186 | # CUSTOM ROUTES HERE |
|
186 | # CUSTOM ROUTES HERE | |
187 | #========================================================================== |
|
187 | #========================================================================== | |
188 |
|
188 | |||
189 | # MAIN PAGE |
|
189 | # MAIN PAGE | |
190 | rmap.connect('home', '/', controller='home', action='index', jsroute=True) |
|
190 | rmap.connect('home', '/', controller='home', action='index', jsroute=True) | |
191 | rmap.connect('goto_switcher_data', '/_goto_data', controller='home', |
|
191 | rmap.connect('goto_switcher_data', '/_goto_data', controller='home', | |
192 | action='goto_switcher_data') |
|
192 | action='goto_switcher_data') | |
193 | rmap.connect('repo_list_data', '/_repos', controller='home', |
|
193 | rmap.connect('repo_list_data', '/_repos', controller='home', | |
194 | action='repo_list_data') |
|
194 | action='repo_list_data') | |
195 |
|
195 | |||
196 | rmap.connect('user_autocomplete_data', '/_users', controller='home', |
|
|||
197 | action='user_autocomplete_data', jsroute=True) |
|
|||
198 | rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home', |
|
|||
199 | action='user_group_autocomplete_data', jsroute=True) |
|
|||
200 |
|
||||
201 | # TODO: johbo: Static links, to be replaced by our redirection mechanism |
|
196 | # TODO: johbo: Static links, to be replaced by our redirection mechanism | |
202 | rmap.connect('rst_help', |
|
197 | rmap.connect('rst_help', | |
203 | 'http://docutils.sourceforge.net/docs/user/rst/quickref.html', |
|
198 | 'http://docutils.sourceforge.net/docs/user/rst/quickref.html', | |
204 | _static=True) |
|
199 | _static=True) | |
205 | rmap.connect('markdown_help', |
|
200 | rmap.connect('markdown_help', | |
206 | 'http://daringfireball.net/projects/markdown/syntax', |
|
201 | 'http://daringfireball.net/projects/markdown/syntax', | |
207 | _static=True) |
|
202 | _static=True) | |
208 | rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True) |
|
203 | rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True) | |
209 | rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True) |
|
204 | rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True) | |
210 | rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True) |
|
205 | rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True) | |
211 | # TODO: anderson - making this a static link since redirect won't play |
|
206 | # TODO: anderson - making this a static link since redirect won't play | |
212 | # nice with POST requests |
|
207 | # nice with POST requests | |
213 | rmap.connect('enterprise_license_convert_from_old', |
|
208 | rmap.connect('enterprise_license_convert_from_old', | |
214 | 'https://rhodecode.com/u/license-upgrade', |
|
209 | 'https://rhodecode.com/u/license-upgrade', | |
215 | _static=True) |
|
210 | _static=True) | |
216 |
|
211 | |||
217 | routing_links.connect_redirection_links(rmap) |
|
212 | routing_links.connect_redirection_links(rmap) | |
218 |
|
213 | |||
219 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') |
|
214 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') | |
220 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') |
|
215 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') | |
221 |
|
216 | |||
222 | # ADMIN REPOSITORY ROUTES |
|
217 | # ADMIN REPOSITORY ROUTES | |
223 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
218 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
224 | controller='admin/repos') as m: |
|
219 | controller='admin/repos') as m: | |
225 | m.connect('repos', '/repos', |
|
220 | m.connect('repos', '/repos', | |
226 | action='create', conditions={'method': ['POST']}) |
|
221 | action='create', conditions={'method': ['POST']}) | |
227 | m.connect('repos', '/repos', |
|
222 | m.connect('repos', '/repos', | |
228 | action='index', conditions={'method': ['GET']}) |
|
223 | action='index', conditions={'method': ['GET']}) | |
229 | m.connect('new_repo', '/create_repository', jsroute=True, |
|
224 | m.connect('new_repo', '/create_repository', jsroute=True, | |
230 | action='create_repository', conditions={'method': ['GET']}) |
|
225 | action='create_repository', conditions={'method': ['GET']}) | |
231 | m.connect('/repos/{repo_name}', |
|
226 | m.connect('/repos/{repo_name}', | |
232 | action='update', conditions={'method': ['PUT'], |
|
227 | action='update', conditions={'method': ['PUT'], | |
233 | 'function': check_repo}, |
|
228 | 'function': check_repo}, | |
234 | requirements=URL_NAME_REQUIREMENTS) |
|
229 | requirements=URL_NAME_REQUIREMENTS) | |
235 | m.connect('delete_repo', '/repos/{repo_name}', |
|
230 | m.connect('delete_repo', '/repos/{repo_name}', | |
236 | action='delete', conditions={'method': ['DELETE']}, |
|
231 | action='delete', conditions={'method': ['DELETE']}, | |
237 | requirements=URL_NAME_REQUIREMENTS) |
|
232 | requirements=URL_NAME_REQUIREMENTS) | |
238 | m.connect('repo', '/repos/{repo_name}', |
|
233 | m.connect('repo', '/repos/{repo_name}', | |
239 | action='show', conditions={'method': ['GET'], |
|
234 | action='show', conditions={'method': ['GET'], | |
240 | 'function': check_repo}, |
|
235 | 'function': check_repo}, | |
241 | requirements=URL_NAME_REQUIREMENTS) |
|
236 | requirements=URL_NAME_REQUIREMENTS) | |
242 |
|
237 | |||
243 | # ADMIN REPOSITORY GROUPS ROUTES |
|
238 | # ADMIN REPOSITORY GROUPS ROUTES | |
244 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
239 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
245 | controller='admin/repo_groups') as m: |
|
240 | controller='admin/repo_groups') as m: | |
246 | m.connect('repo_groups', '/repo_groups', |
|
241 | m.connect('repo_groups', '/repo_groups', | |
247 | action='create', conditions={'method': ['POST']}) |
|
242 | action='create', conditions={'method': ['POST']}) | |
248 | m.connect('repo_groups', '/repo_groups', |
|
243 | m.connect('repo_groups', '/repo_groups', | |
249 | action='index', conditions={'method': ['GET']}) |
|
244 | action='index', conditions={'method': ['GET']}) | |
250 | m.connect('new_repo_group', '/repo_groups/new', |
|
245 | m.connect('new_repo_group', '/repo_groups/new', | |
251 | action='new', conditions={'method': ['GET']}) |
|
246 | action='new', conditions={'method': ['GET']}) | |
252 | m.connect('update_repo_group', '/repo_groups/{group_name}', |
|
247 | m.connect('update_repo_group', '/repo_groups/{group_name}', | |
253 | action='update', conditions={'method': ['PUT'], |
|
248 | action='update', conditions={'method': ['PUT'], | |
254 | 'function': check_group}, |
|
249 | 'function': check_group}, | |
255 | requirements=URL_NAME_REQUIREMENTS) |
|
250 | requirements=URL_NAME_REQUIREMENTS) | |
256 |
|
251 | |||
257 | # EXTRAS REPO GROUP ROUTES |
|
252 | # EXTRAS REPO GROUP ROUTES | |
258 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
253 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', | |
259 | action='edit', |
|
254 | action='edit', | |
260 | conditions={'method': ['GET'], 'function': check_group}, |
|
255 | conditions={'method': ['GET'], 'function': check_group}, | |
261 | requirements=URL_NAME_REQUIREMENTS) |
|
256 | requirements=URL_NAME_REQUIREMENTS) | |
262 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
257 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', | |
263 | action='edit', |
|
258 | action='edit', | |
264 | conditions={'method': ['PUT'], 'function': check_group}, |
|
259 | conditions={'method': ['PUT'], 'function': check_group}, | |
265 | requirements=URL_NAME_REQUIREMENTS) |
|
260 | requirements=URL_NAME_REQUIREMENTS) | |
266 |
|
261 | |||
267 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
262 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', | |
268 | action='edit_repo_group_advanced', |
|
263 | action='edit_repo_group_advanced', | |
269 | conditions={'method': ['GET'], 'function': check_group}, |
|
264 | conditions={'method': ['GET'], 'function': check_group}, | |
270 | requirements=URL_NAME_REQUIREMENTS) |
|
265 | requirements=URL_NAME_REQUIREMENTS) | |
271 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
266 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', | |
272 | action='edit_repo_group_advanced', |
|
267 | action='edit_repo_group_advanced', | |
273 | conditions={'method': ['PUT'], 'function': check_group}, |
|
268 | conditions={'method': ['PUT'], 'function': check_group}, | |
274 | requirements=URL_NAME_REQUIREMENTS) |
|
269 | requirements=URL_NAME_REQUIREMENTS) | |
275 |
|
270 | |||
276 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
271 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', | |
277 | action='edit_repo_group_perms', |
|
272 | action='edit_repo_group_perms', | |
278 | conditions={'method': ['GET'], 'function': check_group}, |
|
273 | conditions={'method': ['GET'], 'function': check_group}, | |
279 | requirements=URL_NAME_REQUIREMENTS) |
|
274 | requirements=URL_NAME_REQUIREMENTS) | |
280 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
275 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', | |
281 | action='update_perms', |
|
276 | action='update_perms', | |
282 | conditions={'method': ['PUT'], 'function': check_group}, |
|
277 | conditions={'method': ['PUT'], 'function': check_group}, | |
283 | requirements=URL_NAME_REQUIREMENTS) |
|
278 | requirements=URL_NAME_REQUIREMENTS) | |
284 |
|
279 | |||
285 | m.connect('delete_repo_group', '/repo_groups/{group_name}', |
|
280 | m.connect('delete_repo_group', '/repo_groups/{group_name}', | |
286 | action='delete', conditions={'method': ['DELETE'], |
|
281 | action='delete', conditions={'method': ['DELETE'], | |
287 | 'function': check_group}, |
|
282 | 'function': check_group}, | |
288 | requirements=URL_NAME_REQUIREMENTS) |
|
283 | requirements=URL_NAME_REQUIREMENTS) | |
289 |
|
284 | |||
290 | # ADMIN USER ROUTES |
|
285 | # ADMIN USER ROUTES | |
291 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
286 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
292 | controller='admin/users') as m: |
|
287 | controller='admin/users') as m: | |
293 | m.connect('users', '/users', |
|
288 | m.connect('users', '/users', | |
294 | action='create', conditions={'method': ['POST']}) |
|
289 | action='create', conditions={'method': ['POST']}) | |
295 | m.connect('new_user', '/users/new', |
|
290 | m.connect('new_user', '/users/new', | |
296 | action='new', conditions={'method': ['GET']}) |
|
291 | action='new', conditions={'method': ['GET']}) | |
297 | m.connect('update_user', '/users/{user_id}', |
|
292 | m.connect('update_user', '/users/{user_id}', | |
298 | action='update', conditions={'method': ['PUT']}) |
|
293 | action='update', conditions={'method': ['PUT']}) | |
299 | m.connect('delete_user', '/users/{user_id}', |
|
294 | m.connect('delete_user', '/users/{user_id}', | |
300 | action='delete', conditions={'method': ['DELETE']}) |
|
295 | action='delete', conditions={'method': ['DELETE']}) | |
301 | m.connect('edit_user', '/users/{user_id}/edit', |
|
296 | m.connect('edit_user', '/users/{user_id}/edit', | |
302 | action='edit', conditions={'method': ['GET']}, jsroute=True) |
|
297 | action='edit', conditions={'method': ['GET']}, jsroute=True) | |
303 | m.connect('user', '/users/{user_id}', |
|
298 | m.connect('user', '/users/{user_id}', | |
304 | action='show', conditions={'method': ['GET']}) |
|
299 | action='show', conditions={'method': ['GET']}) | |
305 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', |
|
300 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', | |
306 | action='reset_password', conditions={'method': ['POST']}) |
|
301 | action='reset_password', conditions={'method': ['POST']}) | |
307 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', |
|
302 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', | |
308 | action='create_personal_repo_group', conditions={'method': ['POST']}) |
|
303 | action='create_personal_repo_group', conditions={'method': ['POST']}) | |
309 |
|
304 | |||
310 | # EXTRAS USER ROUTES |
|
305 | # EXTRAS USER ROUTES | |
311 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
306 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', | |
312 | action='edit_advanced', conditions={'method': ['GET']}) |
|
307 | action='edit_advanced', conditions={'method': ['GET']}) | |
313 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
308 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', | |
314 | action='update_advanced', conditions={'method': ['PUT']}) |
|
309 | action='update_advanced', conditions={'method': ['PUT']}) | |
315 |
|
310 | |||
316 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
311 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', | |
317 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
312 | action='edit_global_perms', conditions={'method': ['GET']}) | |
318 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
313 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', | |
319 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
314 | action='update_global_perms', conditions={'method': ['PUT']}) | |
320 |
|
315 | |||
321 | m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary', |
|
316 | m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary', | |
322 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
317 | action='edit_perms_summary', conditions={'method': ['GET']}) | |
323 |
|
318 | |||
324 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
319 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', | |
325 | action='edit_emails', conditions={'method': ['GET']}) |
|
320 | action='edit_emails', conditions={'method': ['GET']}) | |
326 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
321 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', | |
327 | action='add_email', conditions={'method': ['PUT']}) |
|
322 | action='add_email', conditions={'method': ['PUT']}) | |
328 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
323 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', | |
329 | action='delete_email', conditions={'method': ['DELETE']}) |
|
324 | action='delete_email', conditions={'method': ['DELETE']}) | |
330 |
|
325 | |||
331 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
326 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', | |
332 | action='edit_ips', conditions={'method': ['GET']}) |
|
327 | action='edit_ips', conditions={'method': ['GET']}) | |
333 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
328 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', | |
334 | action='add_ip', conditions={'method': ['PUT']}) |
|
329 | action='add_ip', conditions={'method': ['PUT']}) | |
335 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
330 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', | |
336 | action='delete_ip', conditions={'method': ['DELETE']}) |
|
331 | action='delete_ip', conditions={'method': ['DELETE']}) | |
337 |
|
332 | |||
338 | # ADMIN USER GROUPS REST ROUTES |
|
333 | # ADMIN USER GROUPS REST ROUTES | |
339 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
334 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
340 | controller='admin/user_groups') as m: |
|
335 | controller='admin/user_groups') as m: | |
341 | m.connect('users_groups', '/user_groups', |
|
336 | m.connect('users_groups', '/user_groups', | |
342 | action='create', conditions={'method': ['POST']}) |
|
337 | action='create', conditions={'method': ['POST']}) | |
343 | m.connect('users_groups', '/user_groups', |
|
338 | m.connect('users_groups', '/user_groups', | |
344 | action='index', conditions={'method': ['GET']}) |
|
339 | action='index', conditions={'method': ['GET']}) | |
345 | m.connect('new_users_group', '/user_groups/new', |
|
340 | m.connect('new_users_group', '/user_groups/new', | |
346 | action='new', conditions={'method': ['GET']}) |
|
341 | action='new', conditions={'method': ['GET']}) | |
347 | m.connect('update_users_group', '/user_groups/{user_group_id}', |
|
342 | m.connect('update_users_group', '/user_groups/{user_group_id}', | |
348 | action='update', conditions={'method': ['PUT']}) |
|
343 | action='update', conditions={'method': ['PUT']}) | |
349 | m.connect('delete_users_group', '/user_groups/{user_group_id}', |
|
344 | m.connect('delete_users_group', '/user_groups/{user_group_id}', | |
350 | action='delete', conditions={'method': ['DELETE']}) |
|
345 | action='delete', conditions={'method': ['DELETE']}) | |
351 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', |
|
346 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', | |
352 | action='edit', conditions={'method': ['GET']}, |
|
347 | action='edit', conditions={'method': ['GET']}, | |
353 | function=check_user_group) |
|
348 | function=check_user_group) | |
354 |
|
349 | |||
355 | # EXTRAS USER GROUP ROUTES |
|
350 | # EXTRAS USER GROUP ROUTES | |
356 | m.connect('edit_user_group_global_perms', |
|
351 | m.connect('edit_user_group_global_perms', | |
357 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
352 | '/user_groups/{user_group_id}/edit/global_permissions', | |
358 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
353 | action='edit_global_perms', conditions={'method': ['GET']}) | |
359 | m.connect('edit_user_group_global_perms', |
|
354 | m.connect('edit_user_group_global_perms', | |
360 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
355 | '/user_groups/{user_group_id}/edit/global_permissions', | |
361 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
356 | action='update_global_perms', conditions={'method': ['PUT']}) | |
362 | m.connect('edit_user_group_perms_summary', |
|
357 | m.connect('edit_user_group_perms_summary', | |
363 | '/user_groups/{user_group_id}/edit/permissions_summary', |
|
358 | '/user_groups/{user_group_id}/edit/permissions_summary', | |
364 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
359 | action='edit_perms_summary', conditions={'method': ['GET']}) | |
365 |
|
360 | |||
366 | m.connect('edit_user_group_perms', |
|
361 | m.connect('edit_user_group_perms', | |
367 | '/user_groups/{user_group_id}/edit/permissions', |
|
362 | '/user_groups/{user_group_id}/edit/permissions', | |
368 | action='edit_perms', conditions={'method': ['GET']}) |
|
363 | action='edit_perms', conditions={'method': ['GET']}) | |
369 | m.connect('edit_user_group_perms', |
|
364 | m.connect('edit_user_group_perms', | |
370 | '/user_groups/{user_group_id}/edit/permissions', |
|
365 | '/user_groups/{user_group_id}/edit/permissions', | |
371 | action='update_perms', conditions={'method': ['PUT']}) |
|
366 | action='update_perms', conditions={'method': ['PUT']}) | |
372 |
|
367 | |||
373 | m.connect('edit_user_group_advanced', |
|
368 | m.connect('edit_user_group_advanced', | |
374 | '/user_groups/{user_group_id}/edit/advanced', |
|
369 | '/user_groups/{user_group_id}/edit/advanced', | |
375 | action='edit_advanced', conditions={'method': ['GET']}) |
|
370 | action='edit_advanced', conditions={'method': ['GET']}) | |
376 |
|
371 | |||
377 | m.connect('edit_user_group_advanced_sync', |
|
372 | m.connect('edit_user_group_advanced_sync', | |
378 | '/user_groups/{user_group_id}/edit/advanced/sync', |
|
373 | '/user_groups/{user_group_id}/edit/advanced/sync', | |
379 | action='edit_advanced_set_synchronization', conditions={'method': ['POST']}) |
|
374 | action='edit_advanced_set_synchronization', conditions={'method': ['POST']}) | |
380 |
|
375 | |||
381 | m.connect('edit_user_group_members', |
|
376 | m.connect('edit_user_group_members', | |
382 | '/user_groups/{user_group_id}/edit/members', jsroute=True, |
|
377 | '/user_groups/{user_group_id}/edit/members', jsroute=True, | |
383 | action='user_group_members', conditions={'method': ['GET']}) |
|
378 | action='user_group_members', conditions={'method': ['GET']}) | |
384 |
|
379 | |||
385 | # ADMIN PERMISSIONS ROUTES |
|
380 | # ADMIN PERMISSIONS ROUTES | |
386 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
381 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
387 | controller='admin/permissions') as m: |
|
382 | controller='admin/permissions') as m: | |
388 | m.connect('admin_permissions_application', '/permissions/application', |
|
383 | m.connect('admin_permissions_application', '/permissions/application', | |
389 | action='permission_application_update', conditions={'method': ['POST']}) |
|
384 | action='permission_application_update', conditions={'method': ['POST']}) | |
390 | m.connect('admin_permissions_application', '/permissions/application', |
|
385 | m.connect('admin_permissions_application', '/permissions/application', | |
391 | action='permission_application', conditions={'method': ['GET']}) |
|
386 | action='permission_application', conditions={'method': ['GET']}) | |
392 |
|
387 | |||
393 | m.connect('admin_permissions_global', '/permissions/global', |
|
388 | m.connect('admin_permissions_global', '/permissions/global', | |
394 | action='permission_global_update', conditions={'method': ['POST']}) |
|
389 | action='permission_global_update', conditions={'method': ['POST']}) | |
395 | m.connect('admin_permissions_global', '/permissions/global', |
|
390 | m.connect('admin_permissions_global', '/permissions/global', | |
396 | action='permission_global', conditions={'method': ['GET']}) |
|
391 | action='permission_global', conditions={'method': ['GET']}) | |
397 |
|
392 | |||
398 | m.connect('admin_permissions_object', '/permissions/object', |
|
393 | m.connect('admin_permissions_object', '/permissions/object', | |
399 | action='permission_objects_update', conditions={'method': ['POST']}) |
|
394 | action='permission_objects_update', conditions={'method': ['POST']}) | |
400 | m.connect('admin_permissions_object', '/permissions/object', |
|
395 | m.connect('admin_permissions_object', '/permissions/object', | |
401 | action='permission_objects', conditions={'method': ['GET']}) |
|
396 | action='permission_objects', conditions={'method': ['GET']}) | |
402 |
|
397 | |||
403 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
398 | m.connect('admin_permissions_ips', '/permissions/ips', | |
404 | action='permission_ips', conditions={'method': ['POST']}) |
|
399 | action='permission_ips', conditions={'method': ['POST']}) | |
405 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
400 | m.connect('admin_permissions_ips', '/permissions/ips', | |
406 | action='permission_ips', conditions={'method': ['GET']}) |
|
401 | action='permission_ips', conditions={'method': ['GET']}) | |
407 |
|
402 | |||
408 | m.connect('admin_permissions_overview', '/permissions/overview', |
|
403 | m.connect('admin_permissions_overview', '/permissions/overview', | |
409 | action='permission_perms', conditions={'method': ['GET']}) |
|
404 | action='permission_perms', conditions={'method': ['GET']}) | |
410 |
|
405 | |||
411 | # ADMIN DEFAULTS REST ROUTES |
|
406 | # ADMIN DEFAULTS REST ROUTES | |
412 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
407 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
413 | controller='admin/defaults') as m: |
|
408 | controller='admin/defaults') as m: | |
414 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
409 | m.connect('admin_defaults_repositories', '/defaults/repositories', | |
415 | action='update_repository_defaults', conditions={'method': ['POST']}) |
|
410 | action='update_repository_defaults', conditions={'method': ['POST']}) | |
416 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
411 | m.connect('admin_defaults_repositories', '/defaults/repositories', | |
417 | action='index', conditions={'method': ['GET']}) |
|
412 | action='index', conditions={'method': ['GET']}) | |
418 |
|
413 | |||
419 | # ADMIN DEBUG STYLE ROUTES |
|
414 | # ADMIN DEBUG STYLE ROUTES | |
420 | if str2bool(config.get('debug_style')): |
|
415 | if str2bool(config.get('debug_style')): | |
421 | with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style', |
|
416 | with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style', | |
422 | controller='debug_style') as m: |
|
417 | controller='debug_style') as m: | |
423 | m.connect('debug_style_home', '', |
|
418 | m.connect('debug_style_home', '', | |
424 | action='index', conditions={'method': ['GET']}) |
|
419 | action='index', conditions={'method': ['GET']}) | |
425 | m.connect('debug_style_template', '/t/{t_path}', |
|
420 | m.connect('debug_style_template', '/t/{t_path}', | |
426 | action='template', conditions={'method': ['GET']}) |
|
421 | action='template', conditions={'method': ['GET']}) | |
427 |
|
422 | |||
428 | # ADMIN SETTINGS ROUTES |
|
423 | # ADMIN SETTINGS ROUTES | |
429 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
424 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
430 | controller='admin/settings') as m: |
|
425 | controller='admin/settings') as m: | |
431 |
|
426 | |||
432 | # default |
|
427 | # default | |
433 | m.connect('admin_settings', '/settings', |
|
428 | m.connect('admin_settings', '/settings', | |
434 | action='settings_global_update', |
|
429 | action='settings_global_update', | |
435 | conditions={'method': ['POST']}) |
|
430 | conditions={'method': ['POST']}) | |
436 | m.connect('admin_settings', '/settings', |
|
431 | m.connect('admin_settings', '/settings', | |
437 | action='settings_global', conditions={'method': ['GET']}) |
|
432 | action='settings_global', conditions={'method': ['GET']}) | |
438 |
|
433 | |||
439 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
434 | m.connect('admin_settings_vcs', '/settings/vcs', | |
440 | action='settings_vcs_update', |
|
435 | action='settings_vcs_update', | |
441 | conditions={'method': ['POST']}) |
|
436 | conditions={'method': ['POST']}) | |
442 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
437 | m.connect('admin_settings_vcs', '/settings/vcs', | |
443 | action='settings_vcs', |
|
438 | action='settings_vcs', | |
444 | conditions={'method': ['GET']}) |
|
439 | conditions={'method': ['GET']}) | |
445 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
440 | m.connect('admin_settings_vcs', '/settings/vcs', | |
446 | action='delete_svn_pattern', |
|
441 | action='delete_svn_pattern', | |
447 | conditions={'method': ['DELETE']}) |
|
442 | conditions={'method': ['DELETE']}) | |
448 |
|
443 | |||
449 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
444 | m.connect('admin_settings_mapping', '/settings/mapping', | |
450 | action='settings_mapping_update', |
|
445 | action='settings_mapping_update', | |
451 | conditions={'method': ['POST']}) |
|
446 | conditions={'method': ['POST']}) | |
452 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
447 | m.connect('admin_settings_mapping', '/settings/mapping', | |
453 | action='settings_mapping', conditions={'method': ['GET']}) |
|
448 | action='settings_mapping', conditions={'method': ['GET']}) | |
454 |
|
449 | |||
455 | m.connect('admin_settings_global', '/settings/global', |
|
450 | m.connect('admin_settings_global', '/settings/global', | |
456 | action='settings_global_update', |
|
451 | action='settings_global_update', | |
457 | conditions={'method': ['POST']}) |
|
452 | conditions={'method': ['POST']}) | |
458 | m.connect('admin_settings_global', '/settings/global', |
|
453 | m.connect('admin_settings_global', '/settings/global', | |
459 | action='settings_global', conditions={'method': ['GET']}) |
|
454 | action='settings_global', conditions={'method': ['GET']}) | |
460 |
|
455 | |||
461 | m.connect('admin_settings_visual', '/settings/visual', |
|
456 | m.connect('admin_settings_visual', '/settings/visual', | |
462 | action='settings_visual_update', |
|
457 | action='settings_visual_update', | |
463 | conditions={'method': ['POST']}) |
|
458 | conditions={'method': ['POST']}) | |
464 | m.connect('admin_settings_visual', '/settings/visual', |
|
459 | m.connect('admin_settings_visual', '/settings/visual', | |
465 | action='settings_visual', conditions={'method': ['GET']}) |
|
460 | action='settings_visual', conditions={'method': ['GET']}) | |
466 |
|
461 | |||
467 | m.connect('admin_settings_issuetracker', |
|
462 | m.connect('admin_settings_issuetracker', | |
468 | '/settings/issue-tracker', action='settings_issuetracker', |
|
463 | '/settings/issue-tracker', action='settings_issuetracker', | |
469 | conditions={'method': ['GET']}) |
|
464 | conditions={'method': ['GET']}) | |
470 | m.connect('admin_settings_issuetracker_save', |
|
465 | m.connect('admin_settings_issuetracker_save', | |
471 | '/settings/issue-tracker/save', |
|
466 | '/settings/issue-tracker/save', | |
472 | action='settings_issuetracker_save', |
|
467 | action='settings_issuetracker_save', | |
473 | conditions={'method': ['POST']}) |
|
468 | conditions={'method': ['POST']}) | |
474 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', |
|
469 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', | |
475 | action='settings_issuetracker_test', |
|
470 | action='settings_issuetracker_test', | |
476 | conditions={'method': ['POST']}) |
|
471 | conditions={'method': ['POST']}) | |
477 | m.connect('admin_issuetracker_delete', |
|
472 | m.connect('admin_issuetracker_delete', | |
478 | '/settings/issue-tracker/delete', |
|
473 | '/settings/issue-tracker/delete', | |
479 | action='settings_issuetracker_delete', |
|
474 | action='settings_issuetracker_delete', | |
480 | conditions={'method': ['DELETE']}) |
|
475 | conditions={'method': ['DELETE']}) | |
481 |
|
476 | |||
482 | m.connect('admin_settings_email', '/settings/email', |
|
477 | m.connect('admin_settings_email', '/settings/email', | |
483 | action='settings_email_update', |
|
478 | action='settings_email_update', | |
484 | conditions={'method': ['POST']}) |
|
479 | conditions={'method': ['POST']}) | |
485 | m.connect('admin_settings_email', '/settings/email', |
|
480 | m.connect('admin_settings_email', '/settings/email', | |
486 | action='settings_email', conditions={'method': ['GET']}) |
|
481 | action='settings_email', conditions={'method': ['GET']}) | |
487 |
|
482 | |||
488 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
483 | m.connect('admin_settings_hooks', '/settings/hooks', | |
489 | action='settings_hooks_update', |
|
484 | action='settings_hooks_update', | |
490 | conditions={'method': ['POST', 'DELETE']}) |
|
485 | conditions={'method': ['POST', 'DELETE']}) | |
491 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
486 | m.connect('admin_settings_hooks', '/settings/hooks', | |
492 | action='settings_hooks', conditions={'method': ['GET']}) |
|
487 | action='settings_hooks', conditions={'method': ['GET']}) | |
493 |
|
488 | |||
494 | m.connect('admin_settings_search', '/settings/search', |
|
489 | m.connect('admin_settings_search', '/settings/search', | |
495 | action='settings_search', conditions={'method': ['GET']}) |
|
490 | action='settings_search', conditions={'method': ['GET']}) | |
496 |
|
491 | |||
497 | m.connect('admin_settings_supervisor', '/settings/supervisor', |
|
492 | m.connect('admin_settings_supervisor', '/settings/supervisor', | |
498 | action='settings_supervisor', conditions={'method': ['GET']}) |
|
493 | action='settings_supervisor', conditions={'method': ['GET']}) | |
499 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', |
|
494 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', | |
500 | action='settings_supervisor_log', conditions={'method': ['GET']}) |
|
495 | action='settings_supervisor_log', conditions={'method': ['GET']}) | |
501 |
|
496 | |||
502 | m.connect('admin_settings_labs', '/settings/labs', |
|
497 | m.connect('admin_settings_labs', '/settings/labs', | |
503 | action='settings_labs_update', |
|
498 | action='settings_labs_update', | |
504 | conditions={'method': ['POST']}) |
|
499 | conditions={'method': ['POST']}) | |
505 | m.connect('admin_settings_labs', '/settings/labs', |
|
500 | m.connect('admin_settings_labs', '/settings/labs', | |
506 | action='settings_labs', conditions={'method': ['GET']}) |
|
501 | action='settings_labs', conditions={'method': ['GET']}) | |
507 |
|
502 | |||
508 | # ADMIN MY ACCOUNT |
|
503 | # ADMIN MY ACCOUNT | |
509 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
504 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
510 | controller='admin/my_account') as m: |
|
505 | controller='admin/my_account') as m: | |
511 |
|
506 | |||
512 | m.connect('my_account_edit', '/my_account/edit', |
|
507 | m.connect('my_account_edit', '/my_account/edit', | |
513 | action='my_account_edit', conditions={'method': ['GET']}) |
|
508 | action='my_account_edit', conditions={'method': ['GET']}) | |
514 | m.connect('my_account', '/my_account/update', |
|
509 | m.connect('my_account', '/my_account/update', | |
515 | action='my_account_update', conditions={'method': ['POST']}) |
|
510 | action='my_account_update', conditions={'method': ['POST']}) | |
516 |
|
511 | |||
517 | # NOTE(marcink): this needs to be kept for password force flag to be |
|
512 | # NOTE(marcink): this needs to be kept for password force flag to be | |
518 | # handler, remove after migration to pyramid |
|
513 | # handler, remove after migration to pyramid | |
519 | m.connect('my_account_password', '/my_account/password', |
|
514 | m.connect('my_account_password', '/my_account/password', | |
520 | action='my_account_password', conditions={'method': ['GET']}) |
|
515 | action='my_account_password', conditions={'method': ['GET']}) | |
521 |
|
516 | |||
522 | m.connect('my_account_repos', '/my_account/repos', |
|
517 | m.connect('my_account_repos', '/my_account/repos', | |
523 | action='my_account_repos', conditions={'method': ['GET']}) |
|
518 | action='my_account_repos', conditions={'method': ['GET']}) | |
524 |
|
519 | |||
525 | m.connect('my_account_watched', '/my_account/watched', |
|
520 | m.connect('my_account_watched', '/my_account/watched', | |
526 | action='my_account_watched', conditions={'method': ['GET']}) |
|
521 | action='my_account_watched', conditions={'method': ['GET']}) | |
527 |
|
522 | |||
528 | m.connect('my_account_pullrequests', '/my_account/pull_requests', |
|
523 | m.connect('my_account_pullrequests', '/my_account/pull_requests', | |
529 | action='my_account_pullrequests', conditions={'method': ['GET']}) |
|
524 | action='my_account_pullrequests', conditions={'method': ['GET']}) | |
530 |
|
525 | |||
531 | m.connect('my_account_perms', '/my_account/perms', |
|
526 | m.connect('my_account_perms', '/my_account/perms', | |
532 | action='my_account_perms', conditions={'method': ['GET']}) |
|
527 | action='my_account_perms', conditions={'method': ['GET']}) | |
533 |
|
528 | |||
534 | m.connect('my_account_emails', '/my_account/emails', |
|
529 | m.connect('my_account_emails', '/my_account/emails', | |
535 | action='my_account_emails', conditions={'method': ['GET']}) |
|
530 | action='my_account_emails', conditions={'method': ['GET']}) | |
536 | m.connect('my_account_emails', '/my_account/emails', |
|
531 | m.connect('my_account_emails', '/my_account/emails', | |
537 | action='my_account_emails_add', conditions={'method': ['POST']}) |
|
532 | action='my_account_emails_add', conditions={'method': ['POST']}) | |
538 | m.connect('my_account_emails', '/my_account/emails', |
|
533 | m.connect('my_account_emails', '/my_account/emails', | |
539 | action='my_account_emails_delete', conditions={'method': ['DELETE']}) |
|
534 | action='my_account_emails_delete', conditions={'method': ['DELETE']}) | |
540 |
|
535 | |||
541 | m.connect('my_account_notifications', '/my_account/notifications', |
|
536 | m.connect('my_account_notifications', '/my_account/notifications', | |
542 | action='my_notifications', |
|
537 | action='my_notifications', | |
543 | conditions={'method': ['GET']}) |
|
538 | conditions={'method': ['GET']}) | |
544 | m.connect('my_account_notifications_toggle_visibility', |
|
539 | m.connect('my_account_notifications_toggle_visibility', | |
545 | '/my_account/toggle_visibility', |
|
540 | '/my_account/toggle_visibility', | |
546 | action='my_notifications_toggle_visibility', |
|
541 | action='my_notifications_toggle_visibility', | |
547 | conditions={'method': ['POST']}) |
|
542 | conditions={'method': ['POST']}) | |
548 | m.connect('my_account_notifications_test_channelstream', |
|
543 | m.connect('my_account_notifications_test_channelstream', | |
549 | '/my_account/test_channelstream', |
|
544 | '/my_account/test_channelstream', | |
550 | action='my_account_notifications_test_channelstream', |
|
545 | action='my_account_notifications_test_channelstream', | |
551 | conditions={'method': ['POST']}) |
|
546 | conditions={'method': ['POST']}) | |
552 |
|
547 | |||
553 | # NOTIFICATION REST ROUTES |
|
548 | # NOTIFICATION REST ROUTES | |
554 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
549 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
555 | controller='admin/notifications') as m: |
|
550 | controller='admin/notifications') as m: | |
556 | m.connect('notifications', '/notifications', |
|
551 | m.connect('notifications', '/notifications', | |
557 | action='index', conditions={'method': ['GET']}) |
|
552 | action='index', conditions={'method': ['GET']}) | |
558 | m.connect('notifications_mark_all_read', '/notifications/mark_all_read', |
|
553 | m.connect('notifications_mark_all_read', '/notifications/mark_all_read', | |
559 | action='mark_all_read', conditions={'method': ['POST']}) |
|
554 | action='mark_all_read', conditions={'method': ['POST']}) | |
560 | m.connect('/notifications/{notification_id}', |
|
555 | m.connect('/notifications/{notification_id}', | |
561 | action='update', conditions={'method': ['PUT']}) |
|
556 | action='update', conditions={'method': ['PUT']}) | |
562 | m.connect('/notifications/{notification_id}', |
|
557 | m.connect('/notifications/{notification_id}', | |
563 | action='delete', conditions={'method': ['DELETE']}) |
|
558 | action='delete', conditions={'method': ['DELETE']}) | |
564 | m.connect('notification', '/notifications/{notification_id}', |
|
559 | m.connect('notification', '/notifications/{notification_id}', | |
565 | action='show', conditions={'method': ['GET']}) |
|
560 | action='show', conditions={'method': ['GET']}) | |
566 |
|
561 | |||
567 | # ADMIN GIST |
|
562 | # ADMIN GIST | |
568 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
563 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
569 | controller='admin/gists') as m: |
|
564 | controller='admin/gists') as m: | |
570 | m.connect('gists', '/gists', |
|
565 | m.connect('gists', '/gists', | |
571 | action='create', conditions={'method': ['POST']}) |
|
566 | action='create', conditions={'method': ['POST']}) | |
572 | m.connect('gists', '/gists', jsroute=True, |
|
567 | m.connect('gists', '/gists', jsroute=True, | |
573 | action='index', conditions={'method': ['GET']}) |
|
568 | action='index', conditions={'method': ['GET']}) | |
574 | m.connect('new_gist', '/gists/new', jsroute=True, |
|
569 | m.connect('new_gist', '/gists/new', jsroute=True, | |
575 | action='new', conditions={'method': ['GET']}) |
|
570 | action='new', conditions={'method': ['GET']}) | |
576 |
|
571 | |||
577 | m.connect('/gists/{gist_id}', |
|
572 | m.connect('/gists/{gist_id}', | |
578 | action='delete', conditions={'method': ['DELETE']}) |
|
573 | action='delete', conditions={'method': ['DELETE']}) | |
579 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
574 | m.connect('edit_gist', '/gists/{gist_id}/edit', | |
580 | action='edit_form', conditions={'method': ['GET']}) |
|
575 | action='edit_form', conditions={'method': ['GET']}) | |
581 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
576 | m.connect('edit_gist', '/gists/{gist_id}/edit', | |
582 | action='edit', conditions={'method': ['POST']}) |
|
577 | action='edit', conditions={'method': ['POST']}) | |
583 | m.connect( |
|
578 | m.connect( | |
584 | 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision', |
|
579 | 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision', | |
585 | action='check_revision', conditions={'method': ['GET']}) |
|
580 | action='check_revision', conditions={'method': ['GET']}) | |
586 |
|
581 | |||
587 | m.connect('gist', '/gists/{gist_id}', |
|
582 | m.connect('gist', '/gists/{gist_id}', | |
588 | action='show', conditions={'method': ['GET']}) |
|
583 | action='show', conditions={'method': ['GET']}) | |
589 | m.connect('gist_rev', '/gists/{gist_id}/{revision}', |
|
584 | m.connect('gist_rev', '/gists/{gist_id}/{revision}', | |
590 | revision='tip', |
|
585 | revision='tip', | |
591 | action='show', conditions={'method': ['GET']}) |
|
586 | action='show', conditions={'method': ['GET']}) | |
592 | m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}', |
|
587 | m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}', | |
593 | revision='tip', |
|
588 | revision='tip', | |
594 | action='show', conditions={'method': ['GET']}) |
|
589 | action='show', conditions={'method': ['GET']}) | |
595 | m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}', |
|
590 | m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}', | |
596 | revision='tip', |
|
591 | revision='tip', | |
597 | action='show', conditions={'method': ['GET']}, |
|
592 | action='show', conditions={'method': ['GET']}, | |
598 | requirements=URL_NAME_REQUIREMENTS) |
|
593 | requirements=URL_NAME_REQUIREMENTS) | |
599 |
|
594 | |||
600 | # ADMIN MAIN PAGES |
|
595 | # ADMIN MAIN PAGES | |
601 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
596 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
602 | controller='admin/admin') as m: |
|
597 | controller='admin/admin') as m: | |
603 | m.connect('admin_home', '', action='index') |
|
598 | m.connect('admin_home', '', action='index') | |
604 | m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}', |
|
599 | m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}', | |
605 | action='add_repo') |
|
600 | action='add_repo') | |
606 | m.connect( |
|
601 | m.connect( | |
607 | 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}', |
|
602 | 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}', | |
608 | action='pull_requests') |
|
603 | action='pull_requests') | |
609 | m.connect( |
|
604 | m.connect( | |
610 | 'pull_requests_global_1', '/pull-requests/{pull_request_id:[0-9]+}', |
|
605 | 'pull_requests_global_1', '/pull-requests/{pull_request_id:[0-9]+}', | |
611 | action='pull_requests') |
|
606 | action='pull_requests') | |
612 | m.connect( |
|
607 | m.connect( | |
613 | 'pull_requests_global', '/pull-request/{pull_request_id:[0-9]+}', |
|
608 | 'pull_requests_global', '/pull-request/{pull_request_id:[0-9]+}', | |
614 | action='pull_requests') |
|
609 | action='pull_requests') | |
615 |
|
610 | |||
616 | # USER JOURNAL |
|
611 | # USER JOURNAL | |
617 | rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,), |
|
612 | rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,), | |
618 | controller='journal', action='index') |
|
613 | controller='journal', action='index') | |
619 | rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,), |
|
614 | rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,), | |
620 | controller='journal', action='journal_rss') |
|
615 | controller='journal', action='journal_rss') | |
621 | rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,), |
|
616 | rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,), | |
622 | controller='journal', action='journal_atom') |
|
617 | controller='journal', action='journal_atom') | |
623 |
|
618 | |||
624 | rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,), |
|
619 | rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,), | |
625 | controller='journal', action='public_journal') |
|
620 | controller='journal', action='public_journal') | |
626 |
|
621 | |||
627 | rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,), |
|
622 | rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,), | |
628 | controller='journal', action='public_journal_rss') |
|
623 | controller='journal', action='public_journal_rss') | |
629 |
|
624 | |||
630 | rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,), |
|
625 | rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,), | |
631 | controller='journal', action='public_journal_rss') |
|
626 | controller='journal', action='public_journal_rss') | |
632 |
|
627 | |||
633 | rmap.connect('public_journal_atom', |
|
628 | rmap.connect('public_journal_atom', | |
634 | '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal', |
|
629 | '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal', | |
635 | action='public_journal_atom') |
|
630 | action='public_journal_atom') | |
636 |
|
631 | |||
637 | rmap.connect('public_journal_atom_old', |
|
632 | rmap.connect('public_journal_atom_old', | |
638 | '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal', |
|
633 | '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal', | |
639 | action='public_journal_atom') |
|
634 | action='public_journal_atom') | |
640 |
|
635 | |||
641 | rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,), |
|
636 | rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,), | |
642 | controller='journal', action='toggle_following', jsroute=True, |
|
637 | controller='journal', action='toggle_following', jsroute=True, | |
643 | conditions={'method': ['POST']}) |
|
638 | conditions={'method': ['POST']}) | |
644 |
|
639 | |||
645 | # FULL TEXT SEARCH |
|
640 | # FULL TEXT SEARCH | |
646 | rmap.connect('search', '%s/search' % (ADMIN_PREFIX,), |
|
641 | rmap.connect('search', '%s/search' % (ADMIN_PREFIX,), | |
647 | controller='search') |
|
642 | controller='search') | |
648 | rmap.connect('search_repo_home', '/{repo_name}/search', |
|
643 | rmap.connect('search_repo_home', '/{repo_name}/search', | |
649 | controller='search', |
|
644 | controller='search', | |
650 | action='index', |
|
645 | action='index', | |
651 | conditions={'function': check_repo}, |
|
646 | conditions={'function': check_repo}, | |
652 | requirements=URL_NAME_REQUIREMENTS) |
|
647 | requirements=URL_NAME_REQUIREMENTS) | |
653 |
|
648 | |||
654 | # FEEDS |
|
649 | # FEEDS | |
655 | rmap.connect('rss_feed_home', '/{repo_name}/feed/rss', |
|
650 | rmap.connect('rss_feed_home', '/{repo_name}/feed/rss', | |
656 | controller='feed', action='rss', |
|
651 | controller='feed', action='rss', | |
657 | conditions={'function': check_repo}, |
|
652 | conditions={'function': check_repo}, | |
658 | requirements=URL_NAME_REQUIREMENTS) |
|
653 | requirements=URL_NAME_REQUIREMENTS) | |
659 |
|
654 | |||
660 | rmap.connect('atom_feed_home', '/{repo_name}/feed/atom', |
|
655 | rmap.connect('atom_feed_home', '/{repo_name}/feed/atom', | |
661 | controller='feed', action='atom', |
|
656 | controller='feed', action='atom', | |
662 | conditions={'function': check_repo}, |
|
657 | conditions={'function': check_repo}, | |
663 | requirements=URL_NAME_REQUIREMENTS) |
|
658 | requirements=URL_NAME_REQUIREMENTS) | |
664 |
|
659 | |||
665 | #========================================================================== |
|
660 | #========================================================================== | |
666 | # REPOSITORY ROUTES |
|
661 | # REPOSITORY ROUTES | |
667 | #========================================================================== |
|
662 | #========================================================================== | |
668 |
|
663 | |||
669 | rmap.connect('repo_creating_home', '/{repo_name}/repo_creating', |
|
664 | rmap.connect('repo_creating_home', '/{repo_name}/repo_creating', | |
670 | controller='admin/repos', action='repo_creating', |
|
665 | controller='admin/repos', action='repo_creating', | |
671 | requirements=URL_NAME_REQUIREMENTS) |
|
666 | requirements=URL_NAME_REQUIREMENTS) | |
672 | rmap.connect('repo_check_home', '/{repo_name}/crepo_check', |
|
667 | rmap.connect('repo_check_home', '/{repo_name}/crepo_check', | |
673 | controller='admin/repos', action='repo_check', |
|
668 | controller='admin/repos', action='repo_check', | |
674 | requirements=URL_NAME_REQUIREMENTS) |
|
669 | requirements=URL_NAME_REQUIREMENTS) | |
675 |
|
670 | |||
676 | rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}', |
|
671 | rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}', | |
677 | controller='summary', action='repo_stats', |
|
672 | controller='summary', action='repo_stats', | |
678 | conditions={'function': check_repo}, |
|
673 | conditions={'function': check_repo}, | |
679 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
674 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
680 |
|
675 | |||
681 | rmap.connect('repo_refs_data', '/{repo_name}/refs-data', |
|
676 | rmap.connect('repo_refs_data', '/{repo_name}/refs-data', | |
682 | controller='summary', action='repo_refs_data', |
|
677 | controller='summary', action='repo_refs_data', | |
683 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
678 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
684 | rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog', |
|
679 | rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog', | |
685 | controller='summary', action='repo_refs_changelog_data', |
|
680 | controller='summary', action='repo_refs_changelog_data', | |
686 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
681 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
687 | rmap.connect('repo_default_reviewers_data', '/{repo_name}/default-reviewers', |
|
682 | rmap.connect('repo_default_reviewers_data', '/{repo_name}/default-reviewers', | |
688 | controller='summary', action='repo_default_reviewers_data', |
|
683 | controller='summary', action='repo_default_reviewers_data', | |
689 | jsroute=True, requirements=URL_NAME_REQUIREMENTS) |
|
684 | jsroute=True, requirements=URL_NAME_REQUIREMENTS) | |
690 |
|
685 | |||
691 | rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}', |
|
686 | rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}', | |
692 | controller='changeset', revision='tip', |
|
687 | controller='changeset', revision='tip', | |
693 | conditions={'function': check_repo}, |
|
688 | conditions={'function': check_repo}, | |
694 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
689 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
695 | rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}', |
|
690 | rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}', | |
696 | controller='changeset', revision='tip', action='changeset_children', |
|
691 | controller='changeset', revision='tip', action='changeset_children', | |
697 | conditions={'function': check_repo}, |
|
692 | conditions={'function': check_repo}, | |
698 | requirements=URL_NAME_REQUIREMENTS) |
|
693 | requirements=URL_NAME_REQUIREMENTS) | |
699 | rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}', |
|
694 | rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}', | |
700 | controller='changeset', revision='tip', action='changeset_parents', |
|
695 | controller='changeset', revision='tip', action='changeset_parents', | |
701 | conditions={'function': check_repo}, |
|
696 | conditions={'function': check_repo}, | |
702 | requirements=URL_NAME_REQUIREMENTS) |
|
697 | requirements=URL_NAME_REQUIREMENTS) | |
703 |
|
698 | |||
704 | # repo edit options |
|
699 | # repo edit options | |
705 | rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True, |
|
700 | rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True, | |
706 | controller='admin/repos', action='edit', |
|
701 | controller='admin/repos', action='edit', | |
707 | conditions={'method': ['GET'], 'function': check_repo}, |
|
702 | conditions={'method': ['GET'], 'function': check_repo}, | |
708 | requirements=URL_NAME_REQUIREMENTS) |
|
703 | requirements=URL_NAME_REQUIREMENTS) | |
709 |
|
704 | |||
710 | rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions', |
|
705 | rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions', | |
711 | jsroute=True, |
|
706 | jsroute=True, | |
712 | controller='admin/repos', action='edit_permissions', |
|
707 | controller='admin/repos', action='edit_permissions', | |
713 | conditions={'method': ['GET'], 'function': check_repo}, |
|
708 | conditions={'method': ['GET'], 'function': check_repo}, | |
714 | requirements=URL_NAME_REQUIREMENTS) |
|
709 | requirements=URL_NAME_REQUIREMENTS) | |
715 | rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions', |
|
710 | rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions', | |
716 | controller='admin/repos', action='edit_permissions_update', |
|
711 | controller='admin/repos', action='edit_permissions_update', | |
717 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
712 | conditions={'method': ['PUT'], 'function': check_repo}, | |
718 | requirements=URL_NAME_REQUIREMENTS) |
|
713 | requirements=URL_NAME_REQUIREMENTS) | |
719 |
|
714 | |||
720 | rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields', |
|
715 | rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields', | |
721 | controller='admin/repos', action='edit_fields', |
|
716 | controller='admin/repos', action='edit_fields', | |
722 | conditions={'method': ['GET'], 'function': check_repo}, |
|
717 | conditions={'method': ['GET'], 'function': check_repo}, | |
723 | requirements=URL_NAME_REQUIREMENTS) |
|
718 | requirements=URL_NAME_REQUIREMENTS) | |
724 | rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new', |
|
719 | rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new', | |
725 | controller='admin/repos', action='create_repo_field', |
|
720 | controller='admin/repos', action='create_repo_field', | |
726 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
721 | conditions={'method': ['PUT'], 'function': check_repo}, | |
727 | requirements=URL_NAME_REQUIREMENTS) |
|
722 | requirements=URL_NAME_REQUIREMENTS) | |
728 | rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}', |
|
723 | rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}', | |
729 | controller='admin/repos', action='delete_repo_field', |
|
724 | controller='admin/repos', action='delete_repo_field', | |
730 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
725 | conditions={'method': ['DELETE'], 'function': check_repo}, | |
731 | requirements=URL_NAME_REQUIREMENTS) |
|
726 | requirements=URL_NAME_REQUIREMENTS) | |
732 |
|
727 | |||
733 | rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced', |
|
728 | rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced', | |
734 | controller='admin/repos', action='edit_advanced', |
|
729 | controller='admin/repos', action='edit_advanced', | |
735 | conditions={'method': ['GET'], 'function': check_repo}, |
|
730 | conditions={'method': ['GET'], 'function': check_repo}, | |
736 | requirements=URL_NAME_REQUIREMENTS) |
|
731 | requirements=URL_NAME_REQUIREMENTS) | |
737 |
|
732 | |||
738 | rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking', |
|
733 | rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking', | |
739 | controller='admin/repos', action='edit_advanced_locking', |
|
734 | controller='admin/repos', action='edit_advanced_locking', | |
740 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
735 | conditions={'method': ['PUT'], 'function': check_repo}, | |
741 | requirements=URL_NAME_REQUIREMENTS) |
|
736 | requirements=URL_NAME_REQUIREMENTS) | |
742 | rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle', |
|
737 | rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle', | |
743 | controller='admin/repos', action='toggle_locking', |
|
738 | controller='admin/repos', action='toggle_locking', | |
744 | conditions={'method': ['GET'], 'function': check_repo}, |
|
739 | conditions={'method': ['GET'], 'function': check_repo}, | |
745 | requirements=URL_NAME_REQUIREMENTS) |
|
740 | requirements=URL_NAME_REQUIREMENTS) | |
746 |
|
741 | |||
747 | rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal', |
|
742 | rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal', | |
748 | controller='admin/repos', action='edit_advanced_journal', |
|
743 | controller='admin/repos', action='edit_advanced_journal', | |
749 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
744 | conditions={'method': ['PUT'], 'function': check_repo}, | |
750 | requirements=URL_NAME_REQUIREMENTS) |
|
745 | requirements=URL_NAME_REQUIREMENTS) | |
751 |
|
746 | |||
752 | rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork', |
|
747 | rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork', | |
753 | controller='admin/repos', action='edit_advanced_fork', |
|
748 | controller='admin/repos', action='edit_advanced_fork', | |
754 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
749 | conditions={'method': ['PUT'], 'function': check_repo}, | |
755 | requirements=URL_NAME_REQUIREMENTS) |
|
750 | requirements=URL_NAME_REQUIREMENTS) | |
756 |
|
751 | |||
757 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', |
|
752 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', | |
758 | controller='admin/repos', action='edit_caches_form', |
|
753 | controller='admin/repos', action='edit_caches_form', | |
759 | conditions={'method': ['GET'], 'function': check_repo}, |
|
754 | conditions={'method': ['GET'], 'function': check_repo}, | |
760 | requirements=URL_NAME_REQUIREMENTS) |
|
755 | requirements=URL_NAME_REQUIREMENTS) | |
761 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', |
|
756 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', | |
762 | controller='admin/repos', action='edit_caches', |
|
757 | controller='admin/repos', action='edit_caches', | |
763 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
758 | conditions={'method': ['PUT'], 'function': check_repo}, | |
764 | requirements=URL_NAME_REQUIREMENTS) |
|
759 | requirements=URL_NAME_REQUIREMENTS) | |
765 |
|
760 | |||
766 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
761 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', | |
767 | controller='admin/repos', action='edit_remote_form', |
|
762 | controller='admin/repos', action='edit_remote_form', | |
768 | conditions={'method': ['GET'], 'function': check_repo}, |
|
763 | conditions={'method': ['GET'], 'function': check_repo}, | |
769 | requirements=URL_NAME_REQUIREMENTS) |
|
764 | requirements=URL_NAME_REQUIREMENTS) | |
770 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
765 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', | |
771 | controller='admin/repos', action='edit_remote', |
|
766 | controller='admin/repos', action='edit_remote', | |
772 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
767 | conditions={'method': ['PUT'], 'function': check_repo}, | |
773 | requirements=URL_NAME_REQUIREMENTS) |
|
768 | requirements=URL_NAME_REQUIREMENTS) | |
774 |
|
769 | |||
775 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
770 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', | |
776 | controller='admin/repos', action='edit_statistics_form', |
|
771 | controller='admin/repos', action='edit_statistics_form', | |
777 | conditions={'method': ['GET'], 'function': check_repo}, |
|
772 | conditions={'method': ['GET'], 'function': check_repo}, | |
778 | requirements=URL_NAME_REQUIREMENTS) |
|
773 | requirements=URL_NAME_REQUIREMENTS) | |
779 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
774 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', | |
780 | controller='admin/repos', action='edit_statistics', |
|
775 | controller='admin/repos', action='edit_statistics', | |
781 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
776 | conditions={'method': ['PUT'], 'function': check_repo}, | |
782 | requirements=URL_NAME_REQUIREMENTS) |
|
777 | requirements=URL_NAME_REQUIREMENTS) | |
783 | rmap.connect('repo_settings_issuetracker', |
|
778 | rmap.connect('repo_settings_issuetracker', | |
784 | '/{repo_name}/settings/issue-tracker', |
|
779 | '/{repo_name}/settings/issue-tracker', | |
785 | controller='admin/repos', action='repo_issuetracker', |
|
780 | controller='admin/repos', action='repo_issuetracker', | |
786 | conditions={'method': ['GET'], 'function': check_repo}, |
|
781 | conditions={'method': ['GET'], 'function': check_repo}, | |
787 | requirements=URL_NAME_REQUIREMENTS) |
|
782 | requirements=URL_NAME_REQUIREMENTS) | |
788 | rmap.connect('repo_issuetracker_test', |
|
783 | rmap.connect('repo_issuetracker_test', | |
789 | '/{repo_name}/settings/issue-tracker/test', |
|
784 | '/{repo_name}/settings/issue-tracker/test', | |
790 | controller='admin/repos', action='repo_issuetracker_test', |
|
785 | controller='admin/repos', action='repo_issuetracker_test', | |
791 | conditions={'method': ['POST'], 'function': check_repo}, |
|
786 | conditions={'method': ['POST'], 'function': check_repo}, | |
792 | requirements=URL_NAME_REQUIREMENTS) |
|
787 | requirements=URL_NAME_REQUIREMENTS) | |
793 | rmap.connect('repo_issuetracker_delete', |
|
788 | rmap.connect('repo_issuetracker_delete', | |
794 | '/{repo_name}/settings/issue-tracker/delete', |
|
789 | '/{repo_name}/settings/issue-tracker/delete', | |
795 | controller='admin/repos', action='repo_issuetracker_delete', |
|
790 | controller='admin/repos', action='repo_issuetracker_delete', | |
796 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
791 | conditions={'method': ['DELETE'], 'function': check_repo}, | |
797 | requirements=URL_NAME_REQUIREMENTS) |
|
792 | requirements=URL_NAME_REQUIREMENTS) | |
798 | rmap.connect('repo_issuetracker_save', |
|
793 | rmap.connect('repo_issuetracker_save', | |
799 | '/{repo_name}/settings/issue-tracker/save', |
|
794 | '/{repo_name}/settings/issue-tracker/save', | |
800 | controller='admin/repos', action='repo_issuetracker_save', |
|
795 | controller='admin/repos', action='repo_issuetracker_save', | |
801 | conditions={'method': ['POST'], 'function': check_repo}, |
|
796 | conditions={'method': ['POST'], 'function': check_repo}, | |
802 | requirements=URL_NAME_REQUIREMENTS) |
|
797 | requirements=URL_NAME_REQUIREMENTS) | |
803 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
798 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', | |
804 | controller='admin/repos', action='repo_settings_vcs_update', |
|
799 | controller='admin/repos', action='repo_settings_vcs_update', | |
805 | conditions={'method': ['POST'], 'function': check_repo}, |
|
800 | conditions={'method': ['POST'], 'function': check_repo}, | |
806 | requirements=URL_NAME_REQUIREMENTS) |
|
801 | requirements=URL_NAME_REQUIREMENTS) | |
807 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
802 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', | |
808 | controller='admin/repos', action='repo_settings_vcs', |
|
803 | controller='admin/repos', action='repo_settings_vcs', | |
809 | conditions={'method': ['GET'], 'function': check_repo}, |
|
804 | conditions={'method': ['GET'], 'function': check_repo}, | |
810 | requirements=URL_NAME_REQUIREMENTS) |
|
805 | requirements=URL_NAME_REQUIREMENTS) | |
811 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
806 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', | |
812 | controller='admin/repos', action='repo_delete_svn_pattern', |
|
807 | controller='admin/repos', action='repo_delete_svn_pattern', | |
813 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
808 | conditions={'method': ['DELETE'], 'function': check_repo}, | |
814 | requirements=URL_NAME_REQUIREMENTS) |
|
809 | requirements=URL_NAME_REQUIREMENTS) | |
815 | rmap.connect('repo_pullrequest_settings', '/{repo_name}/settings/pullrequest', |
|
810 | rmap.connect('repo_pullrequest_settings', '/{repo_name}/settings/pullrequest', | |
816 | controller='admin/repos', action='repo_settings_pullrequest', |
|
811 | controller='admin/repos', action='repo_settings_pullrequest', | |
817 | conditions={'method': ['GET', 'POST'], 'function': check_repo}, |
|
812 | conditions={'method': ['GET', 'POST'], 'function': check_repo}, | |
818 | requirements=URL_NAME_REQUIREMENTS) |
|
813 | requirements=URL_NAME_REQUIREMENTS) | |
819 |
|
814 | |||
820 | # still working url for backward compat. |
|
815 | # still working url for backward compat. | |
821 | rmap.connect('raw_changeset_home_depraced', |
|
816 | rmap.connect('raw_changeset_home_depraced', | |
822 | '/{repo_name}/raw-changeset/{revision}', |
|
817 | '/{repo_name}/raw-changeset/{revision}', | |
823 | controller='changeset', action='changeset_raw', |
|
818 | controller='changeset', action='changeset_raw', | |
824 | revision='tip', conditions={'function': check_repo}, |
|
819 | revision='tip', conditions={'function': check_repo}, | |
825 | requirements=URL_NAME_REQUIREMENTS) |
|
820 | requirements=URL_NAME_REQUIREMENTS) | |
826 |
|
821 | |||
827 | # new URLs |
|
822 | # new URLs | |
828 | rmap.connect('changeset_raw_home', |
|
823 | rmap.connect('changeset_raw_home', | |
829 | '/{repo_name}/changeset-diff/{revision}', |
|
824 | '/{repo_name}/changeset-diff/{revision}', | |
830 | controller='changeset', action='changeset_raw', |
|
825 | controller='changeset', action='changeset_raw', | |
831 | revision='tip', conditions={'function': check_repo}, |
|
826 | revision='tip', conditions={'function': check_repo}, | |
832 | requirements=URL_NAME_REQUIREMENTS) |
|
827 | requirements=URL_NAME_REQUIREMENTS) | |
833 |
|
828 | |||
834 | rmap.connect('changeset_patch_home', |
|
829 | rmap.connect('changeset_patch_home', | |
835 | '/{repo_name}/changeset-patch/{revision}', |
|
830 | '/{repo_name}/changeset-patch/{revision}', | |
836 | controller='changeset', action='changeset_patch', |
|
831 | controller='changeset', action='changeset_patch', | |
837 | revision='tip', conditions={'function': check_repo}, |
|
832 | revision='tip', conditions={'function': check_repo}, | |
838 | requirements=URL_NAME_REQUIREMENTS) |
|
833 | requirements=URL_NAME_REQUIREMENTS) | |
839 |
|
834 | |||
840 | rmap.connect('changeset_download_home', |
|
835 | rmap.connect('changeset_download_home', | |
841 | '/{repo_name}/changeset-download/{revision}', |
|
836 | '/{repo_name}/changeset-download/{revision}', | |
842 | controller='changeset', action='changeset_download', |
|
837 | controller='changeset', action='changeset_download', | |
843 | revision='tip', conditions={'function': check_repo}, |
|
838 | revision='tip', conditions={'function': check_repo}, | |
844 | requirements=URL_NAME_REQUIREMENTS) |
|
839 | requirements=URL_NAME_REQUIREMENTS) | |
845 |
|
840 | |||
846 | rmap.connect('changeset_comment', |
|
841 | rmap.connect('changeset_comment', | |
847 | '/{repo_name}/changeset/{revision}/comment', jsroute=True, |
|
842 | '/{repo_name}/changeset/{revision}/comment', jsroute=True, | |
848 | controller='changeset', revision='tip', action='comment', |
|
843 | controller='changeset', revision='tip', action='comment', | |
849 | conditions={'function': check_repo}, |
|
844 | conditions={'function': check_repo}, | |
850 | requirements=URL_NAME_REQUIREMENTS) |
|
845 | requirements=URL_NAME_REQUIREMENTS) | |
851 |
|
846 | |||
852 | rmap.connect('changeset_comment_preview', |
|
847 | rmap.connect('changeset_comment_preview', | |
853 | '/{repo_name}/changeset/comment/preview', jsroute=True, |
|
848 | '/{repo_name}/changeset/comment/preview', jsroute=True, | |
854 | controller='changeset', action='preview_comment', |
|
849 | controller='changeset', action='preview_comment', | |
855 | conditions={'function': check_repo, 'method': ['POST']}, |
|
850 | conditions={'function': check_repo, 'method': ['POST']}, | |
856 | requirements=URL_NAME_REQUIREMENTS) |
|
851 | requirements=URL_NAME_REQUIREMENTS) | |
857 |
|
852 | |||
858 | rmap.connect('changeset_comment_delete', |
|
853 | rmap.connect('changeset_comment_delete', | |
859 | '/{repo_name}/changeset/comment/{comment_id}/delete', |
|
854 | '/{repo_name}/changeset/comment/{comment_id}/delete', | |
860 | controller='changeset', action='delete_comment', |
|
855 | controller='changeset', action='delete_comment', | |
861 | conditions={'function': check_repo, 'method': ['DELETE']}, |
|
856 | conditions={'function': check_repo, 'method': ['DELETE']}, | |
862 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
857 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
863 |
|
858 | |||
864 | rmap.connect('changeset_info', '/{repo_name}/changeset_info/{revision}', |
|
859 | rmap.connect('changeset_info', '/{repo_name}/changeset_info/{revision}', | |
865 | controller='changeset', action='changeset_info', |
|
860 | controller='changeset', action='changeset_info', | |
866 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
861 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
867 |
|
862 | |||
868 | rmap.connect('compare_home', |
|
863 | rmap.connect('compare_home', | |
869 | '/{repo_name}/compare', |
|
864 | '/{repo_name}/compare', | |
870 | controller='compare', action='index', |
|
865 | controller='compare', action='index', | |
871 | conditions={'function': check_repo}, |
|
866 | conditions={'function': check_repo}, | |
872 | requirements=URL_NAME_REQUIREMENTS) |
|
867 | requirements=URL_NAME_REQUIREMENTS) | |
873 |
|
868 | |||
874 | rmap.connect('compare_url', |
|
869 | rmap.connect('compare_url', | |
875 | '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', |
|
870 | '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', | |
876 | controller='compare', action='compare', |
|
871 | controller='compare', action='compare', | |
877 | conditions={'function': check_repo}, |
|
872 | conditions={'function': check_repo}, | |
878 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
873 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
879 |
|
874 | |||
880 | rmap.connect('pullrequest_home', |
|
875 | rmap.connect('pullrequest_home', | |
881 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
876 | '/{repo_name}/pull-request/new', controller='pullrequests', | |
882 | action='index', conditions={'function': check_repo, |
|
877 | action='index', conditions={'function': check_repo, | |
883 | 'method': ['GET']}, |
|
878 | 'method': ['GET']}, | |
884 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
879 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
885 |
|
880 | |||
886 | rmap.connect('pullrequest', |
|
881 | rmap.connect('pullrequest', | |
887 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
882 | '/{repo_name}/pull-request/new', controller='pullrequests', | |
888 | action='create', conditions={'function': check_repo, |
|
883 | action='create', conditions={'function': check_repo, | |
889 | 'method': ['POST']}, |
|
884 | 'method': ['POST']}, | |
890 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
885 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
891 |
|
886 | |||
892 | rmap.connect('pullrequest_repo_refs', |
|
887 | rmap.connect('pullrequest_repo_refs', | |
893 | '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
888 | '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', | |
894 | controller='pullrequests', |
|
889 | controller='pullrequests', | |
895 | action='get_repo_refs', |
|
890 | action='get_repo_refs', | |
896 | conditions={'function': check_repo, 'method': ['GET']}, |
|
891 | conditions={'function': check_repo, 'method': ['GET']}, | |
897 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
892 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
898 |
|
893 | |||
899 | rmap.connect('pullrequest_repo_destinations', |
|
894 | rmap.connect('pullrequest_repo_destinations', | |
900 | '/{repo_name}/pull-request/repo-destinations', |
|
895 | '/{repo_name}/pull-request/repo-destinations', | |
901 | controller='pullrequests', |
|
896 | controller='pullrequests', | |
902 | action='get_repo_destinations', |
|
897 | action='get_repo_destinations', | |
903 | conditions={'function': check_repo, 'method': ['GET']}, |
|
898 | conditions={'function': check_repo, 'method': ['GET']}, | |
904 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
899 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
905 |
|
900 | |||
906 | rmap.connect('pullrequest_show', |
|
901 | rmap.connect('pullrequest_show', | |
907 | '/{repo_name}/pull-request/{pull_request_id}', |
|
902 | '/{repo_name}/pull-request/{pull_request_id}', | |
908 | controller='pullrequests', |
|
903 | controller='pullrequests', | |
909 | action='show', conditions={'function': check_repo, |
|
904 | action='show', conditions={'function': check_repo, | |
910 | 'method': ['GET']}, |
|
905 | 'method': ['GET']}, | |
911 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
906 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
912 |
|
907 | |||
913 | rmap.connect('pullrequest_update', |
|
908 | rmap.connect('pullrequest_update', | |
914 | '/{repo_name}/pull-request/{pull_request_id}', |
|
909 | '/{repo_name}/pull-request/{pull_request_id}', | |
915 | controller='pullrequests', |
|
910 | controller='pullrequests', | |
916 | action='update', conditions={'function': check_repo, |
|
911 | action='update', conditions={'function': check_repo, | |
917 | 'method': ['PUT']}, |
|
912 | 'method': ['PUT']}, | |
918 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
913 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
919 |
|
914 | |||
920 | rmap.connect('pullrequest_merge', |
|
915 | rmap.connect('pullrequest_merge', | |
921 | '/{repo_name}/pull-request/{pull_request_id}', |
|
916 | '/{repo_name}/pull-request/{pull_request_id}', | |
922 | controller='pullrequests', |
|
917 | controller='pullrequests', | |
923 | action='merge', conditions={'function': check_repo, |
|
918 | action='merge', conditions={'function': check_repo, | |
924 | 'method': ['POST']}, |
|
919 | 'method': ['POST']}, | |
925 | requirements=URL_NAME_REQUIREMENTS) |
|
920 | requirements=URL_NAME_REQUIREMENTS) | |
926 |
|
921 | |||
927 | rmap.connect('pullrequest_delete', |
|
922 | rmap.connect('pullrequest_delete', | |
928 | '/{repo_name}/pull-request/{pull_request_id}', |
|
923 | '/{repo_name}/pull-request/{pull_request_id}', | |
929 | controller='pullrequests', |
|
924 | controller='pullrequests', | |
930 | action='delete', conditions={'function': check_repo, |
|
925 | action='delete', conditions={'function': check_repo, | |
931 | 'method': ['DELETE']}, |
|
926 | 'method': ['DELETE']}, | |
932 | requirements=URL_NAME_REQUIREMENTS) |
|
927 | requirements=URL_NAME_REQUIREMENTS) | |
933 |
|
928 | |||
934 | rmap.connect('pullrequest_show_all', |
|
929 | rmap.connect('pullrequest_show_all', | |
935 | '/{repo_name}/pull-request', |
|
930 | '/{repo_name}/pull-request', | |
936 | controller='pullrequests', |
|
931 | controller='pullrequests', | |
937 | action='show_all', conditions={'function': check_repo, |
|
932 | action='show_all', conditions={'function': check_repo, | |
938 | 'method': ['GET']}, |
|
933 | 'method': ['GET']}, | |
939 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
934 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
940 |
|
935 | |||
941 | rmap.connect('pullrequest_comment', |
|
936 | rmap.connect('pullrequest_comment', | |
942 | '/{repo_name}/pull-request-comment/{pull_request_id}', |
|
937 | '/{repo_name}/pull-request-comment/{pull_request_id}', | |
943 | controller='pullrequests', |
|
938 | controller='pullrequests', | |
944 | action='comment', conditions={'function': check_repo, |
|
939 | action='comment', conditions={'function': check_repo, | |
945 | 'method': ['POST']}, |
|
940 | 'method': ['POST']}, | |
946 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
941 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
947 |
|
942 | |||
948 | rmap.connect('pullrequest_comment_delete', |
|
943 | rmap.connect('pullrequest_comment_delete', | |
949 | '/{repo_name}/pull-request-comment/{comment_id}/delete', |
|
944 | '/{repo_name}/pull-request-comment/{comment_id}/delete', | |
950 | controller='pullrequests', action='delete_comment', |
|
945 | controller='pullrequests', action='delete_comment', | |
951 | conditions={'function': check_repo, 'method': ['DELETE']}, |
|
946 | conditions={'function': check_repo, 'method': ['DELETE']}, | |
952 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
947 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
953 |
|
948 | |||
954 | rmap.connect('summary_home_explicit', '/{repo_name}/summary', |
|
949 | rmap.connect('summary_home_explicit', '/{repo_name}/summary', | |
955 | controller='summary', conditions={'function': check_repo}, |
|
950 | controller='summary', conditions={'function': check_repo}, | |
956 | requirements=URL_NAME_REQUIREMENTS) |
|
951 | requirements=URL_NAME_REQUIREMENTS) | |
957 |
|
952 | |||
958 | rmap.connect('branches_home', '/{repo_name}/branches', |
|
953 | rmap.connect('branches_home', '/{repo_name}/branches', | |
959 | controller='branches', conditions={'function': check_repo}, |
|
954 | controller='branches', conditions={'function': check_repo}, | |
960 | requirements=URL_NAME_REQUIREMENTS) |
|
955 | requirements=URL_NAME_REQUIREMENTS) | |
961 |
|
956 | |||
962 | rmap.connect('tags_home', '/{repo_name}/tags', |
|
957 | rmap.connect('tags_home', '/{repo_name}/tags', | |
963 | controller='tags', conditions={'function': check_repo}, |
|
958 | controller='tags', conditions={'function': check_repo}, | |
964 | requirements=URL_NAME_REQUIREMENTS) |
|
959 | requirements=URL_NAME_REQUIREMENTS) | |
965 |
|
960 | |||
966 | rmap.connect('bookmarks_home', '/{repo_name}/bookmarks', |
|
961 | rmap.connect('bookmarks_home', '/{repo_name}/bookmarks', | |
967 | controller='bookmarks', conditions={'function': check_repo}, |
|
962 | controller='bookmarks', conditions={'function': check_repo}, | |
968 | requirements=URL_NAME_REQUIREMENTS) |
|
963 | requirements=URL_NAME_REQUIREMENTS) | |
969 |
|
964 | |||
970 | rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True, |
|
965 | rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True, | |
971 | controller='changelog', conditions={'function': check_repo}, |
|
966 | controller='changelog', conditions={'function': check_repo}, | |
972 | requirements=URL_NAME_REQUIREMENTS) |
|
967 | requirements=URL_NAME_REQUIREMENTS) | |
973 |
|
968 | |||
974 | rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary', |
|
969 | rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary', | |
975 | controller='changelog', action='changelog_summary', |
|
970 | controller='changelog', action='changelog_summary', | |
976 | conditions={'function': check_repo}, |
|
971 | conditions={'function': check_repo}, | |
977 | requirements=URL_NAME_REQUIREMENTS) |
|
972 | requirements=URL_NAME_REQUIREMENTS) | |
978 |
|
973 | |||
979 | rmap.connect('changelog_file_home', |
|
974 | rmap.connect('changelog_file_home', | |
980 | '/{repo_name}/changelog/{revision}/{f_path}', |
|
975 | '/{repo_name}/changelog/{revision}/{f_path}', | |
981 | controller='changelog', f_path=None, |
|
976 | controller='changelog', f_path=None, | |
982 | conditions={'function': check_repo}, |
|
977 | conditions={'function': check_repo}, | |
983 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
978 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
984 |
|
979 | |||
985 | rmap.connect('changelog_elements', '/{repo_name}/changelog_details', |
|
980 | rmap.connect('changelog_elements', '/{repo_name}/changelog_details', | |
986 | controller='changelog', action='changelog_elements', |
|
981 | controller='changelog', action='changelog_elements', | |
987 | conditions={'function': check_repo}, |
|
982 | conditions={'function': check_repo}, | |
988 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
983 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
989 |
|
984 | |||
990 | rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}', |
|
985 | rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}', | |
991 | controller='files', revision='tip', f_path='', |
|
986 | controller='files', revision='tip', f_path='', | |
992 | conditions={'function': check_repo}, |
|
987 | conditions={'function': check_repo}, | |
993 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
988 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
994 |
|
989 | |||
995 | rmap.connect('files_home_simple_catchrev', |
|
990 | rmap.connect('files_home_simple_catchrev', | |
996 | '/{repo_name}/files/{revision}', |
|
991 | '/{repo_name}/files/{revision}', | |
997 | controller='files', revision='tip', f_path='', |
|
992 | controller='files', revision='tip', f_path='', | |
998 | conditions={'function': check_repo}, |
|
993 | conditions={'function': check_repo}, | |
999 | requirements=URL_NAME_REQUIREMENTS) |
|
994 | requirements=URL_NAME_REQUIREMENTS) | |
1000 |
|
995 | |||
1001 | rmap.connect('files_home_simple_catchall', |
|
996 | rmap.connect('files_home_simple_catchall', | |
1002 | '/{repo_name}/files', |
|
997 | '/{repo_name}/files', | |
1003 | controller='files', revision='tip', f_path='', |
|
998 | controller='files', revision='tip', f_path='', | |
1004 | conditions={'function': check_repo}, |
|
999 | conditions={'function': check_repo}, | |
1005 | requirements=URL_NAME_REQUIREMENTS) |
|
1000 | requirements=URL_NAME_REQUIREMENTS) | |
1006 |
|
1001 | |||
1007 | rmap.connect('files_history_home', |
|
1002 | rmap.connect('files_history_home', | |
1008 | '/{repo_name}/history/{revision}/{f_path}', |
|
1003 | '/{repo_name}/history/{revision}/{f_path}', | |
1009 | controller='files', action='history', revision='tip', f_path='', |
|
1004 | controller='files', action='history', revision='tip', f_path='', | |
1010 | conditions={'function': check_repo}, |
|
1005 | conditions={'function': check_repo}, | |
1011 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1006 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1012 |
|
1007 | |||
1013 | rmap.connect('files_authors_home', |
|
1008 | rmap.connect('files_authors_home', | |
1014 | '/{repo_name}/authors/{revision}/{f_path}', |
|
1009 | '/{repo_name}/authors/{revision}/{f_path}', | |
1015 | controller='files', action='authors', revision='tip', f_path='', |
|
1010 | controller='files', action='authors', revision='tip', f_path='', | |
1016 | conditions={'function': check_repo}, |
|
1011 | conditions={'function': check_repo}, | |
1017 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1012 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1018 |
|
1013 | |||
1019 | rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}', |
|
1014 | rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}', | |
1020 | controller='files', action='diff', f_path='', |
|
1015 | controller='files', action='diff', f_path='', | |
1021 | conditions={'function': check_repo}, |
|
1016 | conditions={'function': check_repo}, | |
1022 | requirements=URL_NAME_REQUIREMENTS) |
|
1017 | requirements=URL_NAME_REQUIREMENTS) | |
1023 |
|
1018 | |||
1024 | rmap.connect('files_diff_2way_home', |
|
1019 | rmap.connect('files_diff_2way_home', | |
1025 | '/{repo_name}/diff-2way/{f_path}', |
|
1020 | '/{repo_name}/diff-2way/{f_path}', | |
1026 | controller='files', action='diff_2way', f_path='', |
|
1021 | controller='files', action='diff_2way', f_path='', | |
1027 | conditions={'function': check_repo}, |
|
1022 | conditions={'function': check_repo}, | |
1028 | requirements=URL_NAME_REQUIREMENTS) |
|
1023 | requirements=URL_NAME_REQUIREMENTS) | |
1029 |
|
1024 | |||
1030 | rmap.connect('files_rawfile_home', |
|
1025 | rmap.connect('files_rawfile_home', | |
1031 | '/{repo_name}/rawfile/{revision}/{f_path}', |
|
1026 | '/{repo_name}/rawfile/{revision}/{f_path}', | |
1032 | controller='files', action='rawfile', revision='tip', |
|
1027 | controller='files', action='rawfile', revision='tip', | |
1033 | f_path='', conditions={'function': check_repo}, |
|
1028 | f_path='', conditions={'function': check_repo}, | |
1034 | requirements=URL_NAME_REQUIREMENTS) |
|
1029 | requirements=URL_NAME_REQUIREMENTS) | |
1035 |
|
1030 | |||
1036 | rmap.connect('files_raw_home', |
|
1031 | rmap.connect('files_raw_home', | |
1037 | '/{repo_name}/raw/{revision}/{f_path}', |
|
1032 | '/{repo_name}/raw/{revision}/{f_path}', | |
1038 | controller='files', action='raw', revision='tip', f_path='', |
|
1033 | controller='files', action='raw', revision='tip', f_path='', | |
1039 | conditions={'function': check_repo}, |
|
1034 | conditions={'function': check_repo}, | |
1040 | requirements=URL_NAME_REQUIREMENTS) |
|
1035 | requirements=URL_NAME_REQUIREMENTS) | |
1041 |
|
1036 | |||
1042 | rmap.connect('files_render_home', |
|
1037 | rmap.connect('files_render_home', | |
1043 | '/{repo_name}/render/{revision}/{f_path}', |
|
1038 | '/{repo_name}/render/{revision}/{f_path}', | |
1044 | controller='files', action='index', revision='tip', f_path='', |
|
1039 | controller='files', action='index', revision='tip', f_path='', | |
1045 | rendered=True, conditions={'function': check_repo}, |
|
1040 | rendered=True, conditions={'function': check_repo}, | |
1046 | requirements=URL_NAME_REQUIREMENTS) |
|
1041 | requirements=URL_NAME_REQUIREMENTS) | |
1047 |
|
1042 | |||
1048 | rmap.connect('files_annotate_home', |
|
1043 | rmap.connect('files_annotate_home', | |
1049 | '/{repo_name}/annotate/{revision}/{f_path}', |
|
1044 | '/{repo_name}/annotate/{revision}/{f_path}', | |
1050 | controller='files', action='index', revision='tip', |
|
1045 | controller='files', action='index', revision='tip', | |
1051 | f_path='', annotate=True, conditions={'function': check_repo}, |
|
1046 | f_path='', annotate=True, conditions={'function': check_repo}, | |
1052 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1047 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1053 |
|
1048 | |||
1054 | rmap.connect('files_annotate_previous', |
|
1049 | rmap.connect('files_annotate_previous', | |
1055 | '/{repo_name}/annotate-previous/{revision}/{f_path}', |
|
1050 | '/{repo_name}/annotate-previous/{revision}/{f_path}', | |
1056 | controller='files', action='annotate_previous', revision='tip', |
|
1051 | controller='files', action='annotate_previous', revision='tip', | |
1057 | f_path='', annotate=True, conditions={'function': check_repo}, |
|
1052 | f_path='', annotate=True, conditions={'function': check_repo}, | |
1058 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1053 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1059 |
|
1054 | |||
1060 | rmap.connect('files_edit', |
|
1055 | rmap.connect('files_edit', | |
1061 | '/{repo_name}/edit/{revision}/{f_path}', |
|
1056 | '/{repo_name}/edit/{revision}/{f_path}', | |
1062 | controller='files', action='edit', revision='tip', |
|
1057 | controller='files', action='edit', revision='tip', | |
1063 | f_path='', |
|
1058 | f_path='', | |
1064 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1059 | conditions={'function': check_repo, 'method': ['POST']}, | |
1065 | requirements=URL_NAME_REQUIREMENTS) |
|
1060 | requirements=URL_NAME_REQUIREMENTS) | |
1066 |
|
1061 | |||
1067 | rmap.connect('files_edit_home', |
|
1062 | rmap.connect('files_edit_home', | |
1068 | '/{repo_name}/edit/{revision}/{f_path}', |
|
1063 | '/{repo_name}/edit/{revision}/{f_path}', | |
1069 | controller='files', action='edit_home', revision='tip', |
|
1064 | controller='files', action='edit_home', revision='tip', | |
1070 | f_path='', conditions={'function': check_repo}, |
|
1065 | f_path='', conditions={'function': check_repo}, | |
1071 | requirements=URL_NAME_REQUIREMENTS) |
|
1066 | requirements=URL_NAME_REQUIREMENTS) | |
1072 |
|
1067 | |||
1073 | rmap.connect('files_add', |
|
1068 | rmap.connect('files_add', | |
1074 | '/{repo_name}/add/{revision}/{f_path}', |
|
1069 | '/{repo_name}/add/{revision}/{f_path}', | |
1075 | controller='files', action='add', revision='tip', |
|
1070 | controller='files', action='add', revision='tip', | |
1076 | f_path='', |
|
1071 | f_path='', | |
1077 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1072 | conditions={'function': check_repo, 'method': ['POST']}, | |
1078 | requirements=URL_NAME_REQUIREMENTS) |
|
1073 | requirements=URL_NAME_REQUIREMENTS) | |
1079 |
|
1074 | |||
1080 | rmap.connect('files_add_home', |
|
1075 | rmap.connect('files_add_home', | |
1081 | '/{repo_name}/add/{revision}/{f_path}', |
|
1076 | '/{repo_name}/add/{revision}/{f_path}', | |
1082 | controller='files', action='add_home', revision='tip', |
|
1077 | controller='files', action='add_home', revision='tip', | |
1083 | f_path='', conditions={'function': check_repo}, |
|
1078 | f_path='', conditions={'function': check_repo}, | |
1084 | requirements=URL_NAME_REQUIREMENTS) |
|
1079 | requirements=URL_NAME_REQUIREMENTS) | |
1085 |
|
1080 | |||
1086 | rmap.connect('files_delete', |
|
1081 | rmap.connect('files_delete', | |
1087 | '/{repo_name}/delete/{revision}/{f_path}', |
|
1082 | '/{repo_name}/delete/{revision}/{f_path}', | |
1088 | controller='files', action='delete', revision='tip', |
|
1083 | controller='files', action='delete', revision='tip', | |
1089 | f_path='', |
|
1084 | f_path='', | |
1090 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1085 | conditions={'function': check_repo, 'method': ['POST']}, | |
1091 | requirements=URL_NAME_REQUIREMENTS) |
|
1086 | requirements=URL_NAME_REQUIREMENTS) | |
1092 |
|
1087 | |||
1093 | rmap.connect('files_delete_home', |
|
1088 | rmap.connect('files_delete_home', | |
1094 | '/{repo_name}/delete/{revision}/{f_path}', |
|
1089 | '/{repo_name}/delete/{revision}/{f_path}', | |
1095 | controller='files', action='delete_home', revision='tip', |
|
1090 | controller='files', action='delete_home', revision='tip', | |
1096 | f_path='', conditions={'function': check_repo}, |
|
1091 | f_path='', conditions={'function': check_repo}, | |
1097 | requirements=URL_NAME_REQUIREMENTS) |
|
1092 | requirements=URL_NAME_REQUIREMENTS) | |
1098 |
|
1093 | |||
1099 | rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}', |
|
1094 | rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}', | |
1100 | controller='files', action='archivefile', |
|
1095 | controller='files', action='archivefile', | |
1101 | conditions={'function': check_repo}, |
|
1096 | conditions={'function': check_repo}, | |
1102 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1097 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1103 |
|
1098 | |||
1104 | rmap.connect('files_nodelist_home', |
|
1099 | rmap.connect('files_nodelist_home', | |
1105 | '/{repo_name}/nodelist/{revision}/{f_path}', |
|
1100 | '/{repo_name}/nodelist/{revision}/{f_path}', | |
1106 | controller='files', action='nodelist', |
|
1101 | controller='files', action='nodelist', | |
1107 | conditions={'function': check_repo}, |
|
1102 | conditions={'function': check_repo}, | |
1108 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1103 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1109 |
|
1104 | |||
1110 | rmap.connect('files_nodetree_full', |
|
1105 | rmap.connect('files_nodetree_full', | |
1111 | '/{repo_name}/nodetree_full/{commit_id}/{f_path}', |
|
1106 | '/{repo_name}/nodetree_full/{commit_id}/{f_path}', | |
1112 | controller='files', action='nodetree_full', |
|
1107 | controller='files', action='nodetree_full', | |
1113 | conditions={'function': check_repo}, |
|
1108 | conditions={'function': check_repo}, | |
1114 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1109 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1115 |
|
1110 | |||
1116 | rmap.connect('repo_fork_create_home', '/{repo_name}/fork', |
|
1111 | rmap.connect('repo_fork_create_home', '/{repo_name}/fork', | |
1117 | controller='forks', action='fork_create', |
|
1112 | controller='forks', action='fork_create', | |
1118 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1113 | conditions={'function': check_repo, 'method': ['POST']}, | |
1119 | requirements=URL_NAME_REQUIREMENTS) |
|
1114 | requirements=URL_NAME_REQUIREMENTS) | |
1120 |
|
1115 | |||
1121 | rmap.connect('repo_fork_home', '/{repo_name}/fork', |
|
1116 | rmap.connect('repo_fork_home', '/{repo_name}/fork', | |
1122 | controller='forks', action='fork', |
|
1117 | controller='forks', action='fork', | |
1123 | conditions={'function': check_repo}, |
|
1118 | conditions={'function': check_repo}, | |
1124 | requirements=URL_NAME_REQUIREMENTS) |
|
1119 | requirements=URL_NAME_REQUIREMENTS) | |
1125 |
|
1120 | |||
1126 | rmap.connect('repo_forks_home', '/{repo_name}/forks', |
|
1121 | rmap.connect('repo_forks_home', '/{repo_name}/forks', | |
1127 | controller='forks', action='forks', |
|
1122 | controller='forks', action='forks', | |
1128 | conditions={'function': check_repo}, |
|
1123 | conditions={'function': check_repo}, | |
1129 | requirements=URL_NAME_REQUIREMENTS) |
|
1124 | requirements=URL_NAME_REQUIREMENTS) | |
1130 |
|
1125 | |||
1131 | # must be here for proper group/repo catching pattern |
|
1126 | # must be here for proper group/repo catching pattern | |
1132 | _connect_with_slash( |
|
1127 | _connect_with_slash( | |
1133 | rmap, 'repo_group_home', '/{group_name}', |
|
1128 | rmap, 'repo_group_home', '/{group_name}', | |
1134 | controller='home', action='index_repo_group', |
|
1129 | controller='home', action='index_repo_group', | |
1135 | conditions={'function': check_group}, |
|
1130 | conditions={'function': check_group}, | |
1136 | requirements=URL_NAME_REQUIREMENTS) |
|
1131 | requirements=URL_NAME_REQUIREMENTS) | |
1137 |
|
1132 | |||
1138 | # catch all, at the end |
|
1133 | # catch all, at the end | |
1139 | _connect_with_slash( |
|
1134 | _connect_with_slash( | |
1140 | rmap, 'summary_home', '/{repo_name}', jsroute=True, |
|
1135 | rmap, 'summary_home', '/{repo_name}', jsroute=True, | |
1141 | controller='summary', action='index', |
|
1136 | controller='summary', action='index', | |
1142 | conditions={'function': check_repo}, |
|
1137 | conditions={'function': check_repo}, | |
1143 | requirements=URL_NAME_REQUIREMENTS) |
|
1138 | requirements=URL_NAME_REQUIREMENTS) | |
1144 |
|
1139 | |||
1145 | return rmap |
|
1140 | return rmap | |
1146 |
|
1141 | |||
1147 |
|
1142 | |||
1148 | def _connect_with_slash(mapper, name, path, *args, **kwargs): |
|
1143 | def _connect_with_slash(mapper, name, path, *args, **kwargs): | |
1149 | """ |
|
1144 | """ | |
1150 | Connect a route with an optional trailing slash in `path`. |
|
1145 | Connect a route with an optional trailing slash in `path`. | |
1151 | """ |
|
1146 | """ | |
1152 | mapper.connect(name + '_slash', path + '/', *args, **kwargs) |
|
1147 | mapper.connect(name + '_slash', path + '/', *args, **kwargs) | |
1153 | mapper.connect(name, path, *args, **kwargs) |
|
1148 | mapper.connect(name, path, *args, **kwargs) |
@@ -1,290 +1,257 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Home controller for RhodeCode Enterprise |
|
22 | Home controller for RhodeCode Enterprise | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import logging |
|
25 | import logging | |
26 | import time |
|
26 | import time | |
27 | import re |
|
27 | import re | |
28 |
|
28 | |||
29 | from pylons import tmpl_context as c, request, url, config |
|
29 | from pylons import tmpl_context as c, request, url, config | |
30 | from pylons.i18n.translation import _ |
|
30 | from pylons.i18n.translation import _ | |
31 | from sqlalchemy.sql import func |
|
31 | from sqlalchemy.sql import func | |
32 |
|
32 | |||
33 | from rhodecode.lib.auth import ( |
|
33 | from rhodecode.lib.auth import ( | |
34 | LoginRequired, HasPermissionAllDecorator, AuthUser, |
|
34 | LoginRequired, HasPermissionAllDecorator, AuthUser, | |
35 | HasRepoGroupPermissionAnyDecorator, XHRRequired) |
|
35 | HasRepoGroupPermissionAnyDecorator, XHRRequired) | |
36 | from rhodecode.lib.base import BaseController, render |
|
36 | from rhodecode.lib.base import BaseController, render | |
37 | from rhodecode.lib.index import searcher_from_config |
|
37 | from rhodecode.lib.index import searcher_from_config | |
38 | from rhodecode.lib.ext_json import json |
|
38 | from rhodecode.lib.ext_json import json | |
39 | from rhodecode.lib.utils import jsonify |
|
39 | from rhodecode.lib.utils import jsonify | |
40 | from rhodecode.lib.utils2 import safe_unicode, str2bool |
|
40 | from rhodecode.lib.utils2 import safe_unicode, str2bool | |
41 | from rhodecode.model.db import Repository, RepoGroup |
|
41 | from rhodecode.model.db import Repository, RepoGroup | |
42 | from rhodecode.model.repo import RepoModel |
|
42 | from rhodecode.model.repo import RepoModel | |
43 | from rhodecode.model.repo_group import RepoGroupModel |
|
43 | from rhodecode.model.repo_group import RepoGroupModel | |
44 | from rhodecode.model.scm import RepoList, RepoGroupList |
|
44 | from rhodecode.model.scm import RepoList, RepoGroupList | |
45 |
|
45 | |||
46 |
|
46 | |||
47 | log = logging.getLogger(__name__) |
|
47 | log = logging.getLogger(__name__) | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | class HomeController(BaseController): |
|
50 | class HomeController(BaseController): | |
51 | def __before__(self): |
|
51 | def __before__(self): | |
52 | super(HomeController, self).__before__() |
|
52 | super(HomeController, self).__before__() | |
53 |
|
53 | |||
54 | def ping(self): |
|
54 | def ping(self): | |
55 | """ |
|
55 | """ | |
56 | Ping, doesn't require login, good for checking out the platform |
|
56 | Ping, doesn't require login, good for checking out the platform | |
57 | """ |
|
57 | """ | |
58 | instance_id = getattr(c, 'rhodecode_instanceid', '') |
|
58 | instance_id = getattr(c, 'rhodecode_instanceid', '') | |
59 | return 'pong[%s] => %s' % (instance_id, self.ip_addr,) |
|
59 | return 'pong[%s] => %s' % (instance_id, self.ip_addr,) | |
60 |
|
60 | |||
61 | @LoginRequired() |
|
61 | @LoginRequired() | |
62 | @HasPermissionAllDecorator('hg.admin') |
|
62 | @HasPermissionAllDecorator('hg.admin') | |
63 | def error_test(self): |
|
63 | def error_test(self): | |
64 | """ |
|
64 | """ | |
65 | Test exception handling and emails on errors |
|
65 | Test exception handling and emails on errors | |
66 | """ |
|
66 | """ | |
67 | class TestException(Exception): |
|
67 | class TestException(Exception): | |
68 | pass |
|
68 | pass | |
69 |
|
69 | |||
70 | msg = ('RhodeCode Enterprise %s test exception. Generation time: %s' |
|
70 | msg = ('RhodeCode Enterprise %s test exception. Generation time: %s' | |
71 | % (c.rhodecode_name, time.time())) |
|
71 | % (c.rhodecode_name, time.time())) | |
72 | raise TestException(msg) |
|
72 | raise TestException(msg) | |
73 |
|
73 | |||
74 | def _get_groups_and_repos(self, repo_group_id=None): |
|
74 | def _get_groups_and_repos(self, repo_group_id=None): | |
75 | # repo groups groups |
|
75 | # repo groups groups | |
76 | repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) |
|
76 | repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) | |
77 | _perms = ['group.read', 'group.write', 'group.admin'] |
|
77 | _perms = ['group.read', 'group.write', 'group.admin'] | |
78 | repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) |
|
78 | repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) | |
79 | repo_group_data = RepoGroupModel().get_repo_groups_as_dict( |
|
79 | repo_group_data = RepoGroupModel().get_repo_groups_as_dict( | |
80 | repo_group_list=repo_group_list_acl, admin=False) |
|
80 | repo_group_list=repo_group_list_acl, admin=False) | |
81 |
|
81 | |||
82 | # repositories |
|
82 | # repositories | |
83 | repo_list = Repository.get_all_repos(group_id=repo_group_id) |
|
83 | repo_list = Repository.get_all_repos(group_id=repo_group_id) | |
84 | _perms = ['repository.read', 'repository.write', 'repository.admin'] |
|
84 | _perms = ['repository.read', 'repository.write', 'repository.admin'] | |
85 | repo_list_acl = RepoList(repo_list, perm_set=_perms) |
|
85 | repo_list_acl = RepoList(repo_list, perm_set=_perms) | |
86 | repo_data = RepoModel().get_repos_as_dict( |
|
86 | repo_data = RepoModel().get_repos_as_dict( | |
87 | repo_list=repo_list_acl, admin=False) |
|
87 | repo_list=repo_list_acl, admin=False) | |
88 |
|
88 | |||
89 | return repo_data, repo_group_data |
|
89 | return repo_data, repo_group_data | |
90 |
|
90 | |||
91 | @LoginRequired() |
|
91 | @LoginRequired() | |
92 | def index(self): |
|
92 | def index(self): | |
93 | c.repo_group = None |
|
93 | c.repo_group = None | |
94 |
|
94 | |||
95 | repo_data, repo_group_data = self._get_groups_and_repos() |
|
95 | repo_data, repo_group_data = self._get_groups_and_repos() | |
96 | # json used to render the grids |
|
96 | # json used to render the grids | |
97 | c.repos_data = json.dumps(repo_data) |
|
97 | c.repos_data = json.dumps(repo_data) | |
98 | c.repo_groups_data = json.dumps(repo_group_data) |
|
98 | c.repo_groups_data = json.dumps(repo_group_data) | |
99 |
|
99 | |||
100 | return render('/index.mako') |
|
100 | return render('/index.mako') | |
101 |
|
101 | |||
102 | @LoginRequired() |
|
102 | @LoginRequired() | |
103 | @HasRepoGroupPermissionAnyDecorator('group.read', 'group.write', |
|
103 | @HasRepoGroupPermissionAnyDecorator('group.read', 'group.write', | |
104 | 'group.admin') |
|
104 | 'group.admin') | |
105 | def index_repo_group(self, group_name): |
|
105 | def index_repo_group(self, group_name): | |
106 | """GET /repo_group_name: Show a specific item""" |
|
106 | """GET /repo_group_name: Show a specific item""" | |
107 | c.repo_group = RepoGroupModel()._get_repo_group(group_name) |
|
107 | c.repo_group = RepoGroupModel()._get_repo_group(group_name) | |
108 | repo_data, repo_group_data = self._get_groups_and_repos( |
|
108 | repo_data, repo_group_data = self._get_groups_and_repos( | |
109 | c.repo_group.group_id) |
|
109 | c.repo_group.group_id) | |
110 |
|
110 | |||
111 | # json used to render the grids |
|
111 | # json used to render the grids | |
112 | c.repos_data = json.dumps(repo_data) |
|
112 | c.repos_data = json.dumps(repo_data) | |
113 | c.repo_groups_data = json.dumps(repo_group_data) |
|
113 | c.repo_groups_data = json.dumps(repo_group_data) | |
114 |
|
114 | |||
115 | return render('index_repo_group.mako') |
|
115 | return render('index_repo_group.mako') | |
116 |
|
116 | |||
117 | def _get_repo_list(self, name_contains=None, repo_type=None, limit=20): |
|
117 | def _get_repo_list(self, name_contains=None, repo_type=None, limit=20): | |
118 | query = Repository.query()\ |
|
118 | query = Repository.query()\ | |
119 | .order_by(func.length(Repository.repo_name))\ |
|
119 | .order_by(func.length(Repository.repo_name))\ | |
120 | .order_by(Repository.repo_name) |
|
120 | .order_by(Repository.repo_name) | |
121 |
|
121 | |||
122 | if repo_type: |
|
122 | if repo_type: | |
123 | query = query.filter(Repository.repo_type == repo_type) |
|
123 | query = query.filter(Repository.repo_type == repo_type) | |
124 |
|
124 | |||
125 | if name_contains: |
|
125 | if name_contains: | |
126 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
126 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) | |
127 | query = query.filter( |
|
127 | query = query.filter( | |
128 | Repository.repo_name.ilike(ilike_expression)) |
|
128 | Repository.repo_name.ilike(ilike_expression)) | |
129 | query = query.limit(limit) |
|
129 | query = query.limit(limit) | |
130 |
|
130 | |||
131 | all_repos = query.all() |
|
131 | all_repos = query.all() | |
132 | repo_iter = self.scm_model.get_repos(all_repos) |
|
132 | repo_iter = self.scm_model.get_repos(all_repos) | |
133 | return [ |
|
133 | return [ | |
134 | { |
|
134 | { | |
135 | 'id': obj['name'], |
|
135 | 'id': obj['name'], | |
136 | 'text': obj['name'], |
|
136 | 'text': obj['name'], | |
137 | 'type': 'repo', |
|
137 | 'type': 'repo', | |
138 | 'obj': obj['dbrepo'], |
|
138 | 'obj': obj['dbrepo'], | |
139 | 'url': url('summary_home', repo_name=obj['name']) |
|
139 | 'url': url('summary_home', repo_name=obj['name']) | |
140 | } |
|
140 | } | |
141 | for obj in repo_iter] |
|
141 | for obj in repo_iter] | |
142 |
|
142 | |||
143 | def _get_repo_group_list(self, name_contains=None, limit=20): |
|
143 | def _get_repo_group_list(self, name_contains=None, limit=20): | |
144 | query = RepoGroup.query()\ |
|
144 | query = RepoGroup.query()\ | |
145 | .order_by(func.length(RepoGroup.group_name))\ |
|
145 | .order_by(func.length(RepoGroup.group_name))\ | |
146 | .order_by(RepoGroup.group_name) |
|
146 | .order_by(RepoGroup.group_name) | |
147 |
|
147 | |||
148 | if name_contains: |
|
148 | if name_contains: | |
149 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
149 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) | |
150 | query = query.filter( |
|
150 | query = query.filter( | |
151 | RepoGroup.group_name.ilike(ilike_expression)) |
|
151 | RepoGroup.group_name.ilike(ilike_expression)) | |
152 | query = query.limit(limit) |
|
152 | query = query.limit(limit) | |
153 |
|
153 | |||
154 | all_groups = query.all() |
|
154 | all_groups = query.all() | |
155 | repo_groups_iter = self.scm_model.get_repo_groups(all_groups) |
|
155 | repo_groups_iter = self.scm_model.get_repo_groups(all_groups) | |
156 | return [ |
|
156 | return [ | |
157 | { |
|
157 | { | |
158 | 'id': obj.group_name, |
|
158 | 'id': obj.group_name, | |
159 | 'text': obj.group_name, |
|
159 | 'text': obj.group_name, | |
160 | 'type': 'group', |
|
160 | 'type': 'group', | |
161 | 'obj': {}, |
|
161 | 'obj': {}, | |
162 | 'url': url('repo_group_home', group_name=obj.group_name) |
|
162 | 'url': url('repo_group_home', group_name=obj.group_name) | |
163 | } |
|
163 | } | |
164 | for obj in repo_groups_iter] |
|
164 | for obj in repo_groups_iter] | |
165 |
|
165 | |||
166 | def _get_hash_commit_list(self, hash_starts_with=None, limit=20): |
|
166 | def _get_hash_commit_list(self, hash_starts_with=None, limit=20): | |
167 | if not hash_starts_with or len(hash_starts_with) < 3: |
|
167 | if not hash_starts_with or len(hash_starts_with) < 3: | |
168 | return [] |
|
168 | return [] | |
169 |
|
169 | |||
170 | commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with) |
|
170 | commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with) | |
171 |
|
171 | |||
172 | if len(commit_hashes) != 1: |
|
172 | if len(commit_hashes) != 1: | |
173 | return [] |
|
173 | return [] | |
174 |
|
174 | |||
175 | commit_hash_prefix = commit_hashes[0] |
|
175 | commit_hash_prefix = commit_hashes[0] | |
176 |
|
176 | |||
177 | auth_user = AuthUser( |
|
177 | auth_user = AuthUser( | |
178 | user_id=c.rhodecode_user.user_id, ip_addr=self.ip_addr) |
|
178 | user_id=c.rhodecode_user.user_id, ip_addr=self.ip_addr) | |
179 | searcher = searcher_from_config(config) |
|
179 | searcher = searcher_from_config(config) | |
180 | result = searcher.search( |
|
180 | result = searcher.search( | |
181 | 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user, |
|
181 | 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user, | |
182 | raise_on_exc=False) |
|
182 | raise_on_exc=False) | |
183 |
|
183 | |||
184 | return [ |
|
184 | return [ | |
185 | { |
|
185 | { | |
186 | 'id': entry['commit_id'], |
|
186 | 'id': entry['commit_id'], | |
187 | 'text': entry['commit_id'], |
|
187 | 'text': entry['commit_id'], | |
188 | 'type': 'commit', |
|
188 | 'type': 'commit', | |
189 | 'obj': {'repo': entry['repository']}, |
|
189 | 'obj': {'repo': entry['repository']}, | |
190 | 'url': url('changeset_home', |
|
190 | 'url': url('changeset_home', | |
191 | repo_name=entry['repository'], |
|
191 | repo_name=entry['repository'], | |
192 | revision=entry['commit_id']) |
|
192 | revision=entry['commit_id']) | |
193 | } |
|
193 | } | |
194 | for entry in result['results']] |
|
194 | for entry in result['results']] | |
195 |
|
195 | |||
196 | @LoginRequired() |
|
196 | @LoginRequired() | |
197 | @XHRRequired() |
|
197 | @XHRRequired() | |
198 | @jsonify |
|
198 | @jsonify | |
199 | def goto_switcher_data(self): |
|
199 | def goto_switcher_data(self): | |
200 | query = request.GET.get('query') |
|
200 | query = request.GET.get('query') | |
201 | log.debug('generating goto switcher list, query %s', query) |
|
201 | log.debug('generating goto switcher list, query %s', query) | |
202 |
|
202 | |||
203 | res = [] |
|
203 | res = [] | |
204 | repo_groups = self._get_repo_group_list(query) |
|
204 | repo_groups = self._get_repo_group_list(query) | |
205 | if repo_groups: |
|
205 | if repo_groups: | |
206 | res.append({ |
|
206 | res.append({ | |
207 | 'text': _('Groups'), |
|
207 | 'text': _('Groups'), | |
208 | 'children': repo_groups |
|
208 | 'children': repo_groups | |
209 | }) |
|
209 | }) | |
210 |
|
210 | |||
211 | repos = self._get_repo_list(query) |
|
211 | repos = self._get_repo_list(query) | |
212 | if repos: |
|
212 | if repos: | |
213 | res.append({ |
|
213 | res.append({ | |
214 | 'text': _('Repositories'), |
|
214 | 'text': _('Repositories'), | |
215 | 'children': repos |
|
215 | 'children': repos | |
216 | }) |
|
216 | }) | |
217 |
|
217 | |||
218 | commits = self._get_hash_commit_list(query) |
|
218 | commits = self._get_hash_commit_list(query) | |
219 | if commits: |
|
219 | if commits: | |
220 | unique_repos = {} |
|
220 | unique_repos = {} | |
221 | for commit in commits: |
|
221 | for commit in commits: | |
222 | unique_repos.setdefault(commit['obj']['repo'], [] |
|
222 | unique_repos.setdefault(commit['obj']['repo'], [] | |
223 | ).append(commit) |
|
223 | ).append(commit) | |
224 |
|
224 | |||
225 | for repo in unique_repos: |
|
225 | for repo in unique_repos: | |
226 | res.append({ |
|
226 | res.append({ | |
227 | 'text': _('Commits in %(repo)s') % {'repo': repo}, |
|
227 | 'text': _('Commits in %(repo)s') % {'repo': repo}, | |
228 | 'children': unique_repos[repo] |
|
228 | 'children': unique_repos[repo] | |
229 | }) |
|
229 | }) | |
230 |
|
230 | |||
231 | data = { |
|
231 | data = { | |
232 | 'more': False, |
|
232 | 'more': False, | |
233 | 'results': res |
|
233 | 'results': res | |
234 | } |
|
234 | } | |
235 | return data |
|
235 | return data | |
236 |
|
236 | |||
237 | @LoginRequired() |
|
237 | @LoginRequired() | |
238 | @XHRRequired() |
|
238 | @XHRRequired() | |
239 | @jsonify |
|
239 | @jsonify | |
240 | def repo_list_data(self): |
|
240 | def repo_list_data(self): | |
241 | query = request.GET.get('query') |
|
241 | query = request.GET.get('query') | |
242 | repo_type = request.GET.get('repo_type') |
|
242 | repo_type = request.GET.get('repo_type') | |
243 | log.debug('generating repo list, query:%s', query) |
|
243 | log.debug('generating repo list, query:%s', query) | |
244 |
|
244 | |||
245 | res = [] |
|
245 | res = [] | |
246 | repos = self._get_repo_list(query, repo_type=repo_type) |
|
246 | repos = self._get_repo_list(query, repo_type=repo_type) | |
247 | if repos: |
|
247 | if repos: | |
248 | res.append({ |
|
248 | res.append({ | |
249 | 'text': _('Repositories'), |
|
249 | 'text': _('Repositories'), | |
250 | 'children': repos |
|
250 | 'children': repos | |
251 | }) |
|
251 | }) | |
252 |
|
252 | |||
253 | data = { |
|
253 | data = { | |
254 | 'more': False, |
|
254 | 'more': False, | |
255 | 'results': res |
|
255 | 'results': res | |
256 | } |
|
256 | } | |
257 | return data |
|
257 | return data | |
258 |
|
||||
259 | @LoginRequired() |
|
|||
260 | @XHRRequired() |
|
|||
261 | @jsonify |
|
|||
262 | def user_autocomplete_data(self): |
|
|||
263 | query = request.GET.get('query') |
|
|||
264 | active = str2bool(request.GET.get('active') or True) |
|
|||
265 |
|
||||
266 | repo_model = RepoModel() |
|
|||
267 | _users = repo_model.get_users( |
|
|||
268 | name_contains=query, only_active=active) |
|
|||
269 |
|
||||
270 | if request.GET.get('user_groups'): |
|
|||
271 | # extend with user groups |
|
|||
272 | _user_groups = repo_model.get_user_groups( |
|
|||
273 | name_contains=query, only_active=active) |
|
|||
274 | _users = _users + _user_groups |
|
|||
275 |
|
||||
276 | return {'suggestions': _users} |
|
|||
277 |
|
||||
278 | @LoginRequired() |
|
|||
279 | @XHRRequired() |
|
|||
280 | @jsonify |
|
|||
281 | def user_group_autocomplete_data(self): |
|
|||
282 | query = request.GET.get('query') |
|
|||
283 | active = str2bool(request.GET.get('active') or True) |
|
|||
284 |
|
||||
285 | repo_model = RepoModel() |
|
|||
286 | _user_groups = repo_model.get_user_groups( |
|
|||
287 | name_contains=query, only_active=active) |
|
|||
288 | _user_groups = _user_groups |
|
|||
289 |
|
||||
290 | return {'suggestions': _user_groups} |
|
@@ -1,337 +1,341 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Helpers for fixture generation |
|
22 | Helpers for fixture generation | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import os |
|
25 | import os | |
26 | import time |
|
26 | import time | |
27 | import tempfile |
|
27 | import tempfile | |
28 | import shutil |
|
28 | import shutil | |
29 |
|
29 | |||
30 | import configobj |
|
30 | import configobj | |
31 |
|
31 | |||
32 | from rhodecode.tests import * |
|
32 | from rhodecode.tests import * | |
33 | from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist |
|
33 | from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist | |
34 | from rhodecode.model.meta import Session |
|
34 | from rhodecode.model.meta import Session | |
35 | from rhodecode.model.repo import RepoModel |
|
35 | from rhodecode.model.repo import RepoModel | |
36 | from rhodecode.model.user import UserModel |
|
36 | from rhodecode.model.user import UserModel | |
37 | from rhodecode.model.repo_group import RepoGroupModel |
|
37 | from rhodecode.model.repo_group import RepoGroupModel | |
38 | from rhodecode.model.user_group import UserGroupModel |
|
38 | from rhodecode.model.user_group import UserGroupModel | |
39 | from rhodecode.model.gist import GistModel |
|
39 | from rhodecode.model.gist import GistModel | |
40 | from rhodecode.model.auth_token import AuthTokenModel |
|
40 | from rhodecode.model.auth_token import AuthTokenModel | |
41 |
|
41 | |||
42 | dn = os.path.dirname |
|
42 | dn = os.path.dirname | |
43 | FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures') |
|
43 | FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures') | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | def error_function(*args, **kwargs): |
|
46 | def error_function(*args, **kwargs): | |
47 | raise Exception('Total Crash !') |
|
47 | raise Exception('Total Crash !') | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | class TestINI(object): |
|
50 | class TestINI(object): | |
51 | """ |
|
51 | """ | |
52 | Allows to create a new test.ini file as a copy of existing one with edited |
|
52 | Allows to create a new test.ini file as a copy of existing one with edited | |
53 | data. Example usage:: |
|
53 | data. Example usage:: | |
54 |
|
54 | |||
55 | with TestINI('test.ini', [{'section':{'key':val'}]) as new_test_ini_path: |
|
55 | with TestINI('test.ini', [{'section':{'key':val'}]) as new_test_ini_path: | |
56 | print 'paster server %s' % new_test_ini |
|
56 | print 'paster server %s' % new_test_ini | |
57 | """ |
|
57 | """ | |
58 |
|
58 | |||
59 | def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT', |
|
59 | def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT', | |
60 | destroy=True, dir=None): |
|
60 | destroy=True, dir=None): | |
61 | self.ini_file_path = ini_file_path |
|
61 | self.ini_file_path = ini_file_path | |
62 | self.ini_params = ini_params |
|
62 | self.ini_params = ini_params | |
63 | self.new_path = None |
|
63 | self.new_path = None | |
64 | self.new_path_prefix = new_file_prefix |
|
64 | self.new_path_prefix = new_file_prefix | |
65 | self._destroy = destroy |
|
65 | self._destroy = destroy | |
66 | self._dir = dir |
|
66 | self._dir = dir | |
67 |
|
67 | |||
68 | def __enter__(self): |
|
68 | def __enter__(self): | |
69 | return self.create() |
|
69 | return self.create() | |
70 |
|
70 | |||
71 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
71 | def __exit__(self, exc_type, exc_val, exc_tb): | |
72 | self.destroy() |
|
72 | self.destroy() | |
73 |
|
73 | |||
74 | def create(self): |
|
74 | def create(self): | |
75 | config = configobj.ConfigObj( |
|
75 | config = configobj.ConfigObj( | |
76 | self.ini_file_path, file_error=True, write_empty_values=True) |
|
76 | self.ini_file_path, file_error=True, write_empty_values=True) | |
77 |
|
77 | |||
78 | for data in self.ini_params: |
|
78 | for data in self.ini_params: | |
79 | section, ini_params = data.items()[0] |
|
79 | section, ini_params = data.items()[0] | |
80 | for key, val in ini_params.items(): |
|
80 | for key, val in ini_params.items(): | |
81 | config[section][key] = val |
|
81 | config[section][key] = val | |
82 | with tempfile.NamedTemporaryFile( |
|
82 | with tempfile.NamedTemporaryFile( | |
83 | prefix=self.new_path_prefix, suffix='.ini', dir=self._dir, |
|
83 | prefix=self.new_path_prefix, suffix='.ini', dir=self._dir, | |
84 | delete=False) as new_ini_file: |
|
84 | delete=False) as new_ini_file: | |
85 | config.write(new_ini_file) |
|
85 | config.write(new_ini_file) | |
86 | self.new_path = new_ini_file.name |
|
86 | self.new_path = new_ini_file.name | |
87 |
|
87 | |||
88 | return self.new_path |
|
88 | return self.new_path | |
89 |
|
89 | |||
90 | def destroy(self): |
|
90 | def destroy(self): | |
91 | if self._destroy: |
|
91 | if self._destroy: | |
92 | os.remove(self.new_path) |
|
92 | os.remove(self.new_path) | |
93 |
|
93 | |||
94 |
|
94 | |||
95 | class Fixture(object): |
|
95 | class Fixture(object): | |
96 |
|
96 | |||
97 | def anon_access(self, status): |
|
97 | def anon_access(self, status): | |
98 | """ |
|
98 | """ | |
99 | Context process for disabling anonymous access. use like: |
|
99 | Context process for disabling anonymous access. use like: | |
100 | fixture = Fixture() |
|
100 | fixture = Fixture() | |
101 | with fixture.anon_access(False): |
|
101 | with fixture.anon_access(False): | |
102 | #tests |
|
102 | #tests | |
103 |
|
103 | |||
104 | after this block anon access will be set to `not status` |
|
104 | after this block anon access will be set to `not status` | |
105 | """ |
|
105 | """ | |
106 |
|
106 | |||
107 | class context(object): |
|
107 | class context(object): | |
108 | def __enter__(self): |
|
108 | def __enter__(self): | |
109 | anon = User.get_default_user() |
|
109 | anon = User.get_default_user() | |
110 | anon.active = status |
|
110 | anon.active = status | |
111 | Session().add(anon) |
|
111 | Session().add(anon) | |
112 | Session().commit() |
|
112 | Session().commit() | |
113 | time.sleep(1.5) # must sleep for cache (1s to expire) |
|
113 | time.sleep(1.5) # must sleep for cache (1s to expire) | |
114 |
|
114 | |||
115 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
115 | def __exit__(self, exc_type, exc_val, exc_tb): | |
116 | anon = User.get_default_user() |
|
116 | anon = User.get_default_user() | |
117 | anon.active = not status |
|
117 | anon.active = not status | |
118 | Session().add(anon) |
|
118 | Session().add(anon) | |
119 | Session().commit() |
|
119 | Session().commit() | |
120 |
|
120 | |||
121 | return context() |
|
121 | return context() | |
122 |
|
122 | |||
123 | def _get_repo_create_params(self, **custom): |
|
123 | def _get_repo_create_params(self, **custom): | |
124 | defs = { |
|
124 | defs = { | |
125 | 'repo_name': None, |
|
125 | 'repo_name': None, | |
126 | 'repo_type': 'hg', |
|
126 | 'repo_type': 'hg', | |
127 | 'clone_uri': '', |
|
127 | 'clone_uri': '', | |
128 | 'repo_group': '-1', |
|
128 | 'repo_group': '-1', | |
129 | 'repo_description': 'DESC', |
|
129 | 'repo_description': 'DESC', | |
130 | 'repo_private': False, |
|
130 | 'repo_private': False, | |
131 | 'repo_landing_rev': 'rev:tip', |
|
131 | 'repo_landing_rev': 'rev:tip', | |
132 | 'repo_copy_permissions': False, |
|
132 | 'repo_copy_permissions': False, | |
133 | 'repo_state': Repository.STATE_CREATED, |
|
133 | 'repo_state': Repository.STATE_CREATED, | |
134 | } |
|
134 | } | |
135 | defs.update(custom) |
|
135 | defs.update(custom) | |
136 | if 'repo_name_full' not in custom: |
|
136 | if 'repo_name_full' not in custom: | |
137 | defs.update({'repo_name_full': defs['repo_name']}) |
|
137 | defs.update({'repo_name_full': defs['repo_name']}) | |
138 |
|
138 | |||
139 | # fix the repo name if passed as repo_name_full |
|
139 | # fix the repo name if passed as repo_name_full | |
140 | if defs['repo_name']: |
|
140 | if defs['repo_name']: | |
141 | defs['repo_name'] = defs['repo_name'].split('/')[-1] |
|
141 | defs['repo_name'] = defs['repo_name'].split('/')[-1] | |
142 |
|
142 | |||
143 | return defs |
|
143 | return defs | |
144 |
|
144 | |||
145 | def _get_group_create_params(self, **custom): |
|
145 | def _get_group_create_params(self, **custom): | |
146 | defs = { |
|
146 | defs = { | |
147 | 'group_name': None, |
|
147 | 'group_name': None, | |
148 | 'group_description': 'DESC', |
|
148 | 'group_description': 'DESC', | |
149 | 'perm_updates': [], |
|
149 | 'perm_updates': [], | |
150 | 'perm_additions': [], |
|
150 | 'perm_additions': [], | |
151 | 'perm_deletions': [], |
|
151 | 'perm_deletions': [], | |
152 | 'group_parent_id': -1, |
|
152 | 'group_parent_id': -1, | |
153 | 'enable_locking': False, |
|
153 | 'enable_locking': False, | |
154 | 'recursive': False, |
|
154 | 'recursive': False, | |
155 | } |
|
155 | } | |
156 | defs.update(custom) |
|
156 | defs.update(custom) | |
157 |
|
157 | |||
158 | return defs |
|
158 | return defs | |
159 |
|
159 | |||
160 | def _get_user_create_params(self, name, **custom): |
|
160 | def _get_user_create_params(self, name, **custom): | |
161 | defs = { |
|
161 | defs = { | |
162 | 'username': name, |
|
162 | 'username': name, | |
163 | 'password': 'qweqwe', |
|
163 | 'password': 'qweqwe', | |
164 | 'email': '%s+test@rhodecode.org' % name, |
|
164 | 'email': '%s+test@rhodecode.org' % name, | |
165 | 'firstname': 'TestUser', |
|
165 | 'firstname': 'TestUser', | |
166 | 'lastname': 'Test', |
|
166 | 'lastname': 'Test', | |
167 | 'active': True, |
|
167 | 'active': True, | |
168 | 'admin': False, |
|
168 | 'admin': False, | |
169 | 'extern_type': 'rhodecode', |
|
169 | 'extern_type': 'rhodecode', | |
170 | 'extern_name': None, |
|
170 | 'extern_name': None, | |
171 | } |
|
171 | } | |
172 | defs.update(custom) |
|
172 | defs.update(custom) | |
173 |
|
173 | |||
174 | return defs |
|
174 | return defs | |
175 |
|
175 | |||
176 | def _get_user_group_create_params(self, name, **custom): |
|
176 | def _get_user_group_create_params(self, name, **custom): | |
177 | defs = { |
|
177 | defs = { | |
178 | 'users_group_name': name, |
|
178 | 'users_group_name': name, | |
179 | 'user_group_description': 'DESC', |
|
179 | 'user_group_description': 'DESC', | |
180 | 'users_group_active': True, |
|
180 | 'users_group_active': True, | |
181 | 'user_group_data': {}, |
|
181 | 'user_group_data': {}, | |
182 | } |
|
182 | } | |
183 | defs.update(custom) |
|
183 | defs.update(custom) | |
184 |
|
184 | |||
185 | return defs |
|
185 | return defs | |
186 |
|
186 | |||
187 | def create_repo(self, name, **kwargs): |
|
187 | def create_repo(self, name, **kwargs): | |
188 | repo_group = kwargs.get('repo_group') |
|
188 | repo_group = kwargs.get('repo_group') | |
189 | if isinstance(repo_group, RepoGroup): |
|
189 | if isinstance(repo_group, RepoGroup): | |
190 | kwargs['repo_group'] = repo_group.group_id |
|
190 | kwargs['repo_group'] = repo_group.group_id | |
191 | name = name.split(Repository.NAME_SEP)[-1] |
|
191 | name = name.split(Repository.NAME_SEP)[-1] | |
192 | name = Repository.NAME_SEP.join((repo_group.group_name, name)) |
|
192 | name = Repository.NAME_SEP.join((repo_group.group_name, name)) | |
193 |
|
193 | |||
194 | if 'skip_if_exists' in kwargs: |
|
194 | if 'skip_if_exists' in kwargs: | |
195 | del kwargs['skip_if_exists'] |
|
195 | del kwargs['skip_if_exists'] | |
196 | r = Repository.get_by_repo_name(name) |
|
196 | r = Repository.get_by_repo_name(name) | |
197 | if r: |
|
197 | if r: | |
198 | return r |
|
198 | return r | |
199 |
|
199 | |||
200 | form_data = self._get_repo_create_params(repo_name=name, **kwargs) |
|
200 | form_data = self._get_repo_create_params(repo_name=name, **kwargs) | |
201 | cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
201 | cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
202 | RepoModel().create(form_data, cur_user) |
|
202 | RepoModel().create(form_data, cur_user) | |
203 | Session().commit() |
|
203 | Session().commit() | |
204 | repo = Repository.get_by_repo_name(name) |
|
204 | repo = Repository.get_by_repo_name(name) | |
205 | assert repo |
|
205 | assert repo | |
206 | return repo |
|
206 | return repo | |
207 |
|
207 | |||
208 | def create_fork(self, repo_to_fork, fork_name, **kwargs): |
|
208 | def create_fork(self, repo_to_fork, fork_name, **kwargs): | |
209 | repo_to_fork = Repository.get_by_repo_name(repo_to_fork) |
|
209 | repo_to_fork = Repository.get_by_repo_name(repo_to_fork) | |
210 |
|
210 | |||
211 | form_data = self._get_repo_create_params(repo_name=fork_name, |
|
211 | form_data = self._get_repo_create_params(repo_name=fork_name, | |
212 | fork_parent_id=repo_to_fork.repo_id, |
|
212 | fork_parent_id=repo_to_fork.repo_id, | |
213 | repo_type=repo_to_fork.repo_type, |
|
213 | repo_type=repo_to_fork.repo_type, | |
214 | **kwargs) |
|
214 | **kwargs) | |
215 | #TODO: fix it !! |
|
215 | #TODO: fix it !! | |
216 | form_data['description'] = form_data['repo_description'] |
|
216 | form_data['description'] = form_data['repo_description'] | |
217 | form_data['private'] = form_data['repo_private'] |
|
217 | form_data['private'] = form_data['repo_private'] | |
218 | form_data['landing_rev'] = form_data['repo_landing_rev'] |
|
218 | form_data['landing_rev'] = form_data['repo_landing_rev'] | |
219 |
|
219 | |||
220 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
220 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
221 | RepoModel().create_fork(form_data, cur_user=owner) |
|
221 | RepoModel().create_fork(form_data, cur_user=owner) | |
222 | Session().commit() |
|
222 | Session().commit() | |
223 | r = Repository.get_by_repo_name(fork_name) |
|
223 | r = Repository.get_by_repo_name(fork_name) | |
224 | assert r |
|
224 | assert r | |
225 | return r |
|
225 | return r | |
226 |
|
226 | |||
227 | def destroy_repo(self, repo_name, **kwargs): |
|
227 | def destroy_repo(self, repo_name, **kwargs): | |
228 | RepoModel().delete(repo_name, **kwargs) |
|
228 | RepoModel().delete(repo_name, **kwargs) | |
229 | Session().commit() |
|
229 | Session().commit() | |
230 |
|
230 | |||
231 | def destroy_repo_on_filesystem(self, repo_name): |
|
231 | def destroy_repo_on_filesystem(self, repo_name): | |
232 | rm_path = os.path.join(RepoModel().repos_path, repo_name) |
|
232 | rm_path = os.path.join(RepoModel().repos_path, repo_name) | |
233 | if os.path.isdir(rm_path): |
|
233 | if os.path.isdir(rm_path): | |
234 | shutil.rmtree(rm_path) |
|
234 | shutil.rmtree(rm_path) | |
235 |
|
235 | |||
236 | def create_repo_group(self, name, **kwargs): |
|
236 | def create_repo_group(self, name, **kwargs): | |
237 | if 'skip_if_exists' in kwargs: |
|
237 | if 'skip_if_exists' in kwargs: | |
238 | del kwargs['skip_if_exists'] |
|
238 | del kwargs['skip_if_exists'] | |
239 | gr = RepoGroup.get_by_group_name(group_name=name) |
|
239 | gr = RepoGroup.get_by_group_name(group_name=name) | |
240 | if gr: |
|
240 | if gr: | |
241 | return gr |
|
241 | return gr | |
242 | form_data = self._get_group_create_params(group_name=name, **kwargs) |
|
242 | form_data = self._get_group_create_params(group_name=name, **kwargs) | |
243 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
243 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
244 | gr = RepoGroupModel().create( |
|
244 | gr = RepoGroupModel().create( | |
245 | group_name=form_data['group_name'], |
|
245 | group_name=form_data['group_name'], | |
246 | group_description=form_data['group_name'], |
|
246 | group_description=form_data['group_name'], | |
247 | owner=owner) |
|
247 | owner=owner) | |
248 | Session().commit() |
|
248 | Session().commit() | |
249 | gr = RepoGroup.get_by_group_name(gr.group_name) |
|
249 | gr = RepoGroup.get_by_group_name(gr.group_name) | |
250 | return gr |
|
250 | return gr | |
251 |
|
251 | |||
252 | def destroy_repo_group(self, repogroupid): |
|
252 | def destroy_repo_group(self, repogroupid): | |
253 | RepoGroupModel().delete(repogroupid) |
|
253 | RepoGroupModel().delete(repogroupid) | |
254 | Session().commit() |
|
254 | Session().commit() | |
255 |
|
255 | |||
256 | def create_user(self, name, **kwargs): |
|
256 | def create_user(self, name, **kwargs): | |
257 | if 'skip_if_exists' in kwargs: |
|
257 | if 'skip_if_exists' in kwargs: | |
258 | del kwargs['skip_if_exists'] |
|
258 | del kwargs['skip_if_exists'] | |
259 | user = User.get_by_username(name) |
|
259 | user = User.get_by_username(name) | |
260 | if user: |
|
260 | if user: | |
261 | return user |
|
261 | return user | |
262 | form_data = self._get_user_create_params(name, **kwargs) |
|
262 | form_data = self._get_user_create_params(name, **kwargs) | |
263 | user = UserModel().create(form_data) |
|
263 | user = UserModel().create(form_data) | |
264 |
|
264 | |||
265 | # create token for user |
|
265 | # create token for user | |
266 | AuthTokenModel().create( |
|
266 | AuthTokenModel().create( | |
267 | user=user, description='TEST_USER_TOKEN') |
|
267 | user=user, description='TEST_USER_TOKEN') | |
268 |
|
268 | |||
269 | Session().commit() |
|
269 | Session().commit() | |
270 | user = User.get_by_username(user.username) |
|
270 | user = User.get_by_username(user.username) | |
271 | return user |
|
271 | return user | |
272 |
|
272 | |||
273 | def destroy_user(self, userid): |
|
273 | def destroy_user(self, userid): | |
274 | UserModel().delete(userid) |
|
274 | UserModel().delete(userid) | |
275 | Session().commit() |
|
275 | Session().commit() | |
276 |
|
276 | |||
277 | def destroy_users(self, userid_iter): |
|
277 | def destroy_users(self, userid_iter): | |
278 | for user_id in userid_iter: |
|
278 | for user_id in userid_iter: | |
279 | if User.get_by_username(user_id): |
|
279 | if User.get_by_username(user_id): | |
280 | UserModel().delete(user_id) |
|
280 | UserModel().delete(user_id) | |
281 | Session().commit() |
|
281 | Session().commit() | |
282 |
|
282 | |||
283 | def create_user_group(self, name, **kwargs): |
|
283 | def create_user_group(self, name, **kwargs): | |
284 | if 'skip_if_exists' in kwargs: |
|
284 | if 'skip_if_exists' in kwargs: | |
285 | del kwargs['skip_if_exists'] |
|
285 | del kwargs['skip_if_exists'] | |
286 | gr = UserGroup.get_by_group_name(group_name=name) |
|
286 | gr = UserGroup.get_by_group_name(group_name=name) | |
287 | if gr: |
|
287 | if gr: | |
288 | return gr |
|
288 | return gr | |
|
289 | # map active flag to the real attribute. For API consistency of fixtures | |||
|
290 | if 'active' in kwargs: | |||
|
291 | kwargs['users_group_active'] = kwargs['active'] | |||
|
292 | del kwargs['active'] | |||
289 | form_data = self._get_user_group_create_params(name, **kwargs) |
|
293 | form_data = self._get_user_group_create_params(name, **kwargs) | |
290 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
294 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
291 | user_group = UserGroupModel().create( |
|
295 | user_group = UserGroupModel().create( | |
292 | name=form_data['users_group_name'], |
|
296 | name=form_data['users_group_name'], | |
293 | description=form_data['user_group_description'], |
|
297 | description=form_data['user_group_description'], | |
294 | owner=owner, active=form_data['users_group_active'], |
|
298 | owner=owner, active=form_data['users_group_active'], | |
295 | group_data=form_data['user_group_data']) |
|
299 | group_data=form_data['user_group_data']) | |
296 | Session().commit() |
|
300 | Session().commit() | |
297 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) |
|
301 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) | |
298 | return user_group |
|
302 | return user_group | |
299 |
|
303 | |||
300 | def destroy_user_group(self, usergroupid): |
|
304 | def destroy_user_group(self, usergroupid): | |
301 | UserGroupModel().delete(user_group=usergroupid, force=True) |
|
305 | UserGroupModel().delete(user_group=usergroupid, force=True) | |
302 | Session().commit() |
|
306 | Session().commit() | |
303 |
|
307 | |||
304 | def create_gist(self, **kwargs): |
|
308 | def create_gist(self, **kwargs): | |
305 | form_data = { |
|
309 | form_data = { | |
306 | 'description': 'new-gist', |
|
310 | 'description': 'new-gist', | |
307 | 'owner': TEST_USER_ADMIN_LOGIN, |
|
311 | 'owner': TEST_USER_ADMIN_LOGIN, | |
308 | 'gist_type': GistModel.cls.GIST_PUBLIC, |
|
312 | 'gist_type': GistModel.cls.GIST_PUBLIC, | |
309 | 'lifetime': -1, |
|
313 | 'lifetime': -1, | |
310 | 'acl_level': Gist.ACL_LEVEL_PUBLIC, |
|
314 | 'acl_level': Gist.ACL_LEVEL_PUBLIC, | |
311 | 'gist_mapping': {'filename1.txt': {'content': 'hello world'},} |
|
315 | 'gist_mapping': {'filename1.txt': {'content': 'hello world'},} | |
312 | } |
|
316 | } | |
313 | form_data.update(kwargs) |
|
317 | form_data.update(kwargs) | |
314 | gist = GistModel().create( |
|
318 | gist = GistModel().create( | |
315 | description=form_data['description'], owner=form_data['owner'], |
|
319 | description=form_data['description'], owner=form_data['owner'], | |
316 | gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], |
|
320 | gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], | |
317 | lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] |
|
321 | lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] | |
318 | ) |
|
322 | ) | |
319 | Session().commit() |
|
323 | Session().commit() | |
320 | return gist |
|
324 | return gist | |
321 |
|
325 | |||
322 | def destroy_gists(self, gistid=None): |
|
326 | def destroy_gists(self, gistid=None): | |
323 | for g in GistModel.cls.get_all(): |
|
327 | for g in GistModel.cls.get_all(): | |
324 | if gistid: |
|
328 | if gistid: | |
325 | if gistid == g.gist_access_id: |
|
329 | if gistid == g.gist_access_id: | |
326 | GistModel().delete(g) |
|
330 | GistModel().delete(g) | |
327 | else: |
|
331 | else: | |
328 | GistModel().delete(g) |
|
332 | GistModel().delete(g) | |
329 | Session().commit() |
|
333 | Session().commit() | |
330 |
|
334 | |||
331 | def load_resource(self, resource_name, strip=False): |
|
335 | def load_resource(self, resource_name, strip=False): | |
332 | with open(os.path.join(FIXTURES, resource_name)) as f: |
|
336 | with open(os.path.join(FIXTURES, resource_name)) as f: | |
333 | source = f.read() |
|
337 | source = f.read() | |
334 | if strip: |
|
338 | if strip: | |
335 | source = source.strip() |
|
339 | source = source.strip() | |
336 |
|
340 | |||
337 | return source |
|
341 | return source |
@@ -1,390 +1,319 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import json |
|
21 | import json | |
22 |
|
22 | |||
23 | from mock import patch |
|
23 | from mock import patch | |
24 | import pytest |
|
24 | import pytest | |
25 | from pylons import tmpl_context as c |
|
25 | from pylons import tmpl_context as c | |
26 |
|
26 | |||
27 | import rhodecode |
|
27 | import rhodecode | |
28 | from rhodecode.lib.utils import map_groups |
|
28 | from rhodecode.lib.utils import map_groups | |
29 | from rhodecode.model.db import Repository, User, RepoGroup |
|
29 | from rhodecode.model.db import Repository, User, RepoGroup | |
30 | from rhodecode.model.meta import Session |
|
30 | from rhodecode.model.meta import Session | |
31 | from rhodecode.model.repo import RepoModel |
|
31 | from rhodecode.model.repo import RepoModel | |
32 | from rhodecode.model.repo_group import RepoGroupModel |
|
32 | from rhodecode.model.repo_group import RepoGroupModel | |
33 | from rhodecode.model.settings import SettingsModel |
|
33 | from rhodecode.model.settings import SettingsModel | |
34 | from rhodecode.tests import TestController, url, TEST_USER_ADMIN_LOGIN |
|
34 | from rhodecode.tests import TestController, url, TEST_USER_ADMIN_LOGIN | |
35 | from rhodecode.tests.fixture import Fixture |
|
35 | from rhodecode.tests.fixture import Fixture | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | fixture = Fixture() |
|
38 | fixture = Fixture() | |
39 |
|
39 | |||
40 |
|
40 | |||
41 | class TestHomeController(TestController): |
|
41 | class TestHomeController(TestController): | |
42 |
|
42 | |||
43 | def test_index(self): |
|
43 | def test_index(self): | |
44 | self.log_user() |
|
44 | self.log_user() | |
45 | response = self.app.get(url(controller='home', action='index')) |
|
45 | response = self.app.get(url(controller='home', action='index')) | |
46 | # if global permission is set |
|
46 | # if global permission is set | |
47 | response.mustcontain('Add Repository') |
|
47 | response.mustcontain('Add Repository') | |
48 |
|
48 | |||
49 | # search for objects inside the JavaScript JSON |
|
49 | # search for objects inside the JavaScript JSON | |
50 | for repo in Repository.getAll(): |
|
50 | for repo in Repository.getAll(): | |
51 | response.mustcontain('"name_raw": "%s"' % repo.repo_name) |
|
51 | response.mustcontain('"name_raw": "%s"' % repo.repo_name) | |
52 |
|
52 | |||
53 | def test_index_contains_statics_with_ver(self): |
|
53 | def test_index_contains_statics_with_ver(self): | |
54 | self.log_user() |
|
54 | self.log_user() | |
55 | response = self.app.get(url(controller='home', action='index')) |
|
55 | response = self.app.get(url(controller='home', action='index')) | |
56 |
|
56 | |||
57 | rhodecode_version_hash = c.rhodecode_version_hash |
|
57 | rhodecode_version_hash = c.rhodecode_version_hash | |
58 | response.mustcontain('style.css?ver={0}'.format(rhodecode_version_hash)) |
|
58 | response.mustcontain('style.css?ver={0}'.format(rhodecode_version_hash)) | |
59 | response.mustcontain('rhodecode-components.js?ver={0}'.format(rhodecode_version_hash)) |
|
59 | response.mustcontain('rhodecode-components.js?ver={0}'.format(rhodecode_version_hash)) | |
60 |
|
60 | |||
61 | def test_index_contains_backend_specific_details(self, backend): |
|
61 | def test_index_contains_backend_specific_details(self, backend): | |
62 | self.log_user() |
|
62 | self.log_user() | |
63 | response = self.app.get(url(controller='home', action='index')) |
|
63 | response = self.app.get(url(controller='home', action='index')) | |
64 | tip = backend.repo.get_commit().raw_id |
|
64 | tip = backend.repo.get_commit().raw_id | |
65 |
|
65 | |||
66 | # html in javascript variable: |
|
66 | # html in javascript variable: | |
67 | response.mustcontain(r'<i class=\"icon-%s\"' % (backend.alias, )) |
|
67 | response.mustcontain(r'<i class=\"icon-%s\"' % (backend.alias, )) | |
68 | response.mustcontain(r'href=\"/%s\"' % (backend.repo_name, )) |
|
68 | response.mustcontain(r'href=\"/%s\"' % (backend.repo_name, )) | |
69 |
|
69 | |||
70 | response.mustcontain("""/%s/changeset/%s""" % (backend.repo_name, tip)) |
|
70 | response.mustcontain("""/%s/changeset/%s""" % (backend.repo_name, tip)) | |
71 | response.mustcontain("""Added a symlink""") |
|
71 | response.mustcontain("""Added a symlink""") | |
72 |
|
72 | |||
73 | def test_index_with_anonymous_access_disabled(self): |
|
73 | def test_index_with_anonymous_access_disabled(self): | |
74 | with fixture.anon_access(False): |
|
74 | with fixture.anon_access(False): | |
75 | response = self.app.get(url(controller='home', action='index'), |
|
75 | response = self.app.get(url(controller='home', action='index'), | |
76 | status=302) |
|
76 | status=302) | |
77 | assert 'login' in response.location |
|
77 | assert 'login' in response.location | |
78 |
|
78 | |||
79 | def test_index_page_on_groups(self, autologin_user, repo_group): |
|
79 | def test_index_page_on_groups(self, autologin_user, repo_group): | |
80 | response = self.app.get(url('repo_group_home', group_name='gr1')) |
|
80 | response = self.app.get(url('repo_group_home', group_name='gr1')) | |
81 | response.mustcontain("gr1/repo_in_group") |
|
81 | response.mustcontain("gr1/repo_in_group") | |
82 |
|
82 | |||
83 | def test_index_page_on_group_with_trailing_slash( |
|
83 | def test_index_page_on_group_with_trailing_slash( | |
84 | self, autologin_user, repo_group): |
|
84 | self, autologin_user, repo_group): | |
85 | response = self.app.get(url('repo_group_home', group_name='gr1') + '/') |
|
85 | response = self.app.get(url('repo_group_home', group_name='gr1') + '/') | |
86 | response.mustcontain("gr1/repo_in_group") |
|
86 | response.mustcontain("gr1/repo_in_group") | |
87 |
|
87 | |||
88 | @pytest.fixture(scope='class') |
|
88 | @pytest.fixture(scope='class') | |
89 | def repo_group(self, request): |
|
89 | def repo_group(self, request): | |
90 | gr = fixture.create_repo_group('gr1') |
|
90 | gr = fixture.create_repo_group('gr1') | |
91 | fixture.create_repo(name='gr1/repo_in_group', repo_group=gr) |
|
91 | fixture.create_repo(name='gr1/repo_in_group', repo_group=gr) | |
92 |
|
92 | |||
93 | @request.addfinalizer |
|
93 | @request.addfinalizer | |
94 | def cleanup(): |
|
94 | def cleanup(): | |
95 | RepoModel().delete('gr1/repo_in_group') |
|
95 | RepoModel().delete('gr1/repo_in_group') | |
96 | RepoGroupModel().delete(repo_group='gr1', force_delete=True) |
|
96 | RepoGroupModel().delete(repo_group='gr1', force_delete=True) | |
97 | Session().commit() |
|
97 | Session().commit() | |
98 |
|
98 | |||
99 | def test_index_with_name_with_tags(self, autologin_user): |
|
99 | def test_index_with_name_with_tags(self, autologin_user): | |
100 | user = User.get_by_username('test_admin') |
|
100 | user = User.get_by_username('test_admin') | |
101 | user.name = '<img src="/image1" onload="alert(\'Hello, World!\');">' |
|
101 | user.name = '<img src="/image1" onload="alert(\'Hello, World!\');">' | |
102 | user.lastname = ( |
|
102 | user.lastname = ( | |
103 | '<img src="/image2" onload="alert(\'Hello, World!\');">') |
|
103 | '<img src="/image2" onload="alert(\'Hello, World!\');">') | |
104 | Session().add(user) |
|
104 | Session().add(user) | |
105 | Session().commit() |
|
105 | Session().commit() | |
106 |
|
106 | |||
107 | response = self.app.get(url(controller='home', action='index')) |
|
107 | response = self.app.get(url(controller='home', action='index')) | |
108 | response.mustcontain( |
|
108 | response.mustcontain( | |
109 | '<img src="/image1" onload="' |
|
109 | '<img src="/image1" onload="' | |
110 | 'alert('Hello, World!');">') |
|
110 | 'alert('Hello, World!');">') | |
111 | response.mustcontain( |
|
111 | response.mustcontain( | |
112 | '<img src="/image2" onload="' |
|
112 | '<img src="/image2" onload="' | |
113 | 'alert('Hello, World!');">') |
|
113 | 'alert('Hello, World!');">') | |
114 |
|
114 | |||
115 | @pytest.mark.parametrize("name, state", [ |
|
115 | @pytest.mark.parametrize("name, state", [ | |
116 | ('Disabled', False), |
|
116 | ('Disabled', False), | |
117 | ('Enabled', True), |
|
117 | ('Enabled', True), | |
118 | ]) |
|
118 | ]) | |
119 | def test_index_show_version(self, autologin_user, name, state): |
|
119 | def test_index_show_version(self, autologin_user, name, state): | |
120 | version_string = 'RhodeCode Enterprise %s' % rhodecode.__version__ |
|
120 | version_string = 'RhodeCode Enterprise %s' % rhodecode.__version__ | |
121 |
|
121 | |||
122 | sett = SettingsModel().create_or_update_setting( |
|
122 | sett = SettingsModel().create_or_update_setting( | |
123 | 'show_version', state, 'bool') |
|
123 | 'show_version', state, 'bool') | |
124 | Session().add(sett) |
|
124 | Session().add(sett) | |
125 | Session().commit() |
|
125 | Session().commit() | |
126 | SettingsModel().invalidate_settings_cache() |
|
126 | SettingsModel().invalidate_settings_cache() | |
127 |
|
127 | |||
128 | response = self.app.get(url(controller='home', action='index')) |
|
128 | response = self.app.get(url(controller='home', action='index')) | |
129 | if state is True: |
|
129 | if state is True: | |
130 | response.mustcontain(version_string) |
|
130 | response.mustcontain(version_string) | |
131 | if state is False: |
|
131 | if state is False: | |
132 | response.mustcontain(no=[version_string]) |
|
132 | response.mustcontain(no=[version_string]) | |
133 |
|
133 | |||
134 |
|
134 | |||
class TestUserAutocompleteData(TestController):
    """Tests of the ``user_autocomplete_data`` JSON endpoint."""

    def _autocomplete(self, **params):
        # the view only answers XHR requests, hence the header
        response = self.app.get(
            url(controller='home', action='user_autocomplete_data', **params),
            headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200)
        return json.loads(response.body)

    def test_returns_list_of_users(self, user_util):
        self.log_user()
        active_user = user_util.create_user(is_active=True)
        expected = active_user.username
        result = self._autocomplete()
        suggested = [entry['value'] for entry in result['suggestions']]
        assert expected in suggested

    def test_returns_inactive_users_when_active_flag_sent(self, user_util):
        self.log_user()
        inactive_user = user_util.create_user(is_active=False)
        expected = inactive_user.username
        result = self._autocomplete(user_groups='true', active='0')
        suggested = [entry['value'] for entry in result['suggestions']]
        assert expected in suggested

    def test_returns_groups_when_user_groups_sent(self, user_util):
        self.log_user()
        group = user_util.create_user_group(user_groups_active=True)
        expected = group.users_group_name
        result = self._autocomplete(user_groups='true')
        suggested = [entry['value'] for entry in result['suggestions']]
        assert expected in suggested

    def test_result_is_limited_when_query_is_sent(self):
        self.log_user()
        fake_result = [
            {
                'first_name': 'John',
                'value_display': 'hello{} (John Smith)'.format(idx),
                'icon_link': '/images/user14.png',
                'value': 'hello{}'.format(idx),
                'last_name': 'Smith',
                'username': 'hello{}'.format(idx),
                'id': idx,
                'value_type': u'user'
            }
            for idx in range(10)
        ]
        query = 'hello'
        with patch.object(RepoModel, 'get_users',
                          return_value=fake_result) as users_mock, \
                patch.object(RepoModel, 'get_user_groups',
                             return_value=fake_result) as groups_mock:
            result = self._autocomplete(user_groups='true', query=query)

        users_mock.assert_called_once_with(
            name_contains=query, only_active=True)
        groups_mock.assert_called_once_with(
            name_contains=query, only_active=True)
        # users and groups are concatenated: 10 fake entries of each
        assert len(result['suggestions']) == 20
def assert_and_get_content(result):
    """Validate switcher/list payload entries and partition them by type.

    Every child entry must carry a truthy ``id``, ``text`` and ``url``.
    Returns the entries split into ``(repos, groups, commits)``; any other
    ``type`` value raises.
    """
    buckets = {'repo': [], 'group': [], 'commit': []}
    for section in result:
        for entry in section['children']:
            assert entry['id']
            assert entry['text']
            assert entry['url']
            if entry['type'] not in buckets:
                raise Exception('invalid type %s' % entry['type'])
            buckets[entry['type']].append(entry)

    return buckets['repo'], buckets['group'], buckets['commit']
226 |
|
155 | |||
class TestGotoSwitcherData(TestController):
    # Repositories (nested inside groups where the name contains ``/``)
    # that the filtering/sorting assertions below rely on; created once
    # per class by the autouse ``prepare`` fixture.
    required_repos_with_groups = [
        'abc',
        'abc-fork',
        'forks/abcd',
        'abcd',
        'abcde',
        'a/abc',
        'aa/abc',
        'aaa/abc',
        'aaaa/abc',
        'repos_abc/aaa/abc',
        'abc_repos/abc',
        'abc_repos/abcd',
        'xxx/xyz',
        'forked-abc/a/abc'
    ]

    @pytest.fixture(autouse=True, scope='class')
    def prepare(self, request, pylonsapp):
        """Create the repo/group structure listed above; register cleanup."""
        for repo_and_group in self.required_repos_with_groups:
            # create structure of groups and return the last group

            repo_group = map_groups(repo_and_group)

            RepoModel()._create_repo(
                repo_and_group, 'hg', 'test-ac', TEST_USER_ADMIN_LOGIN,
                repo_group=getattr(repo_group, 'group_id', None))

            Session().commit()

        request.addfinalizer(self.cleanup)

    def cleanup(self):
        # first delete all repos
        for repo_and_groups in self.required_repos_with_groups:
            repo = Repository.get_by_repo_name(repo_and_groups)
            if repo:
                RepoModel().delete(repo)
                Session().commit()

        # then delete all empty groups
        for repo_and_groups in self.required_repos_with_groups:
            if '/' in repo_and_groups:
                r_group = repo_and_groups.rsplit('/', 1)[0]
                repo_group = RepoGroup.get_by_group_name(r_group)
                if not repo_group:
                    continue
                parents = repo_group.parents
                RepoGroupModel().delete(repo_group, force_delete=True)
                Session().commit()

                # remove the (now empty) ancestor groups, deepest first
                for el in reversed(parents):
                    RepoGroupModel().delete(el, force_delete=True)
                    Session().commit()

    def test_returns_list_of_repos_and_groups(self):
        self.log_user()

        response = self.app.get(
            url(controller='home', action='goto_switcher_data'),
            headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200)
        result = json.loads(response.body)['results']

        repos, groups, commits = assert_and_get_content(result)

        # without a query the switcher lists everything, but no commits
        assert len(repos) == len(Repository.get_all())
        assert len(groups) == len(RepoGroup.get_all())
        assert len(commits) == 0

    def test_returns_list_of_repos_and_groups_filtered(self):
        self.log_user()

        response = self.app.get(
            url(controller='home', action='goto_switcher_data'),
            headers={'X-REQUESTED-WITH': 'XMLHttpRequest', },
            params={'query': 'abc'}, status=200)
        result = json.loads(response.body)['results']

        repos, groups, commits = assert_and_get_content(result)

        # counts follow from ``required_repos_with_groups`` above
        assert len(repos) == 13
        assert len(groups) == 5
        assert len(commits) == 0

    def test_returns_list_of_properly_sorted_and_filtered(self):
        self.log_user()

        response = self.app.get(
            url(controller='home', action='goto_switcher_data'),
            headers={'X-REQUESTED-WITH': 'XMLHttpRequest', },
            params={'query': 'abc'}, status=200)
        result = json.loads(response.body)['results']

        repos, groups, commits = assert_and_get_content(result)

        # best matches are expected to come first
        test_repos = [x['text'] for x in repos[:4]]
        assert ['abc', 'abcd', 'a/abc', 'abcde'] == test_repos

        test_groups = [x['text'] for x in groups[:4]]
        assert ['abc_repos', 'repos_abc',
                'forked-abc', 'forked-abc/a'] == test_groups
330 |
|
259 | |||
class TestRepoListData(TestController):
    """Tests of the ``repo_list_data`` JSON endpoint."""

    def _fetch(self, **params):
        # the endpoint only answers XHR requests, hence the header;
        # with no params this is the same as omitting ``params`` entirely
        response = self.app.get(
            url(controller='home', action='repo_list_data'),
            headers={'X-REQUESTED-WITH': 'XMLHttpRequest', },
            params=params or None, status=200)
        return json.loads(response.body)['results']

    def test_returns_list_of_repos_and_groups(self, user_util):
        self.log_user()
        result = self._fetch()

        repos, groups, commits = assert_and_get_content(result)

        # unfiltered call lists every repo, never groups or commits
        assert len(repos) == len(Repository.get_all())
        assert len(groups) == 0
        assert len(commits) == 0

    def test_returns_list_of_repos_and_groups_filtered(self):
        self.log_user()
        result = self._fetch(query='vcs_test_git')

        repos, groups, commits = assert_and_get_content(result)

        matching = Repository.query().filter(
            Repository.repo_name.ilike('%vcs_test_git%')).all()
        assert len(repos) == len(matching)
        assert len(groups) == 0
        assert len(commits) == 0

    def test_returns_list_of_repos_and_groups_filtered_with_type(self):
        self.log_user()
        result = self._fetch(query='vcs_test_git', repo_type='git')

        repos, groups, commits = assert_and_get_content(result)

        matching = Repository.query().filter(
            Repository.repo_name.ilike('%vcs_test_git%')).all()
        assert len(repos) == len(matching)
        assert len(groups) == 0
        assert len(commits) == 0

    def test_returns_list_of_repos_non_ascii_query(self):
        self.log_user()
        result = self._fetch(query='ć_vcs_test_ą', repo_type='git')

        repos, groups, commits = assert_and_get_content(result)

        # nothing matches the non-ascii query
        assert len(repos) == 0
        assert len(groups) == 0
        assert len(commits) == 0
General Comments 0
You need to be logged in to leave comments.
Login now