@@ -0,0 +1,33 b''
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | ||
|
22 | def includeme(config): | |
|
23 | ||
|
24 | config.add_route( | |
|
25 | name='user_autocomplete_data', | |
|
26 | pattern='/_users') | |
|
27 | ||
|
28 | config.add_route( | |
|
29 | name='user_group_autocomplete_data', | |
|
30 | pattern='/_user_groups') | |
|
31 | ||
|
32 | # Scan module for configuration decorators. | |
|
33 | config.scan() |
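
The includeme() above registers just the two autocomplete routes and then calls config.scan(), which picks up the @view_config decorators on the HomeView class added later in this changeset. As a hedged sketch (not part of the diff; the factory name is invented for illustration), this is roughly how a Pyramid application pulls the module in — config.include() invokes includeme(config), and the matching config.include('rhodecode.apps.home') call is added to rhodecode/config/middleware.py further down:

    from pyramid.config import Configurator

    def make_demo_app():
        # Hypothetical wiring for illustration only: including the package runs
        # includeme(config) above, registering '/_users' and '/_user_groups'
        # before scan() hooks up the @view_config-decorated view methods.
        config = Configurator()
        config.include('rhodecode.apps.home')
        return config.make_wsgi_app()
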
@@ -0,0 +1,19 b''
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
@@ -0,0 +1,112 b''
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import json | |
|
22 | import pytest | |
|
23 | ||
|
24 | from rhodecode.tests import TestController | |
|
25 | from rhodecode.tests.fixture import Fixture | |
|
26 | ||
|
27 | ||
|
28 | fixture = Fixture() | |
|
29 | ||
|
30 | ||
|
31 | def route_path(name, params=None, **kwargs): | |
|
32 | import urllib | |
|
33 | ||
|
34 | base_url = { | |
|
35 | 'user_autocomplete_data': '/_users', | |
|
36 | 'user_group_autocomplete_data': '/_user_groups' | |
|
37 | }[name].format(**kwargs) | |
|
38 | ||
|
39 | if params: | |
|
40 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |
|
41 | return base_url | |
|
42 | ||
|
43 | ||
|
44 | class TestUserAutocompleteData(TestController): | |
|
45 | ||
|
46 | def test_returns_list_of_users(self, user_util, xhr_header): | |
|
47 | self.log_user() | |
|
48 | user = user_util.create_user(active=True) | |
|
49 | user_name = user.username | |
|
50 | response = self.app.get( | |
|
51 | route_path('user_autocomplete_data'), | |
|
52 | extra_environ=xhr_header, status=200) | |
|
53 | result = json.loads(response.body) | |
|
54 | values = [suggestion['value'] for suggestion in result['suggestions']] | |
|
55 | assert user_name in values | |
|
56 | ||
|
57 | def test_returns_inactive_users_when_active_flag_sent( | |
|
58 | self, user_util, xhr_header): | |
|
59 | self.log_user() | |
|
60 | user = user_util.create_user(active=False) | |
|
61 | user_name = user.username | |
|
62 | ||
|
63 | response = self.app.get( | |
|
64 | route_path('user_autocomplete_data', | |
|
65 | params=dict(user_groups='true', active='0')), | |
|
66 | extra_environ=xhr_header, status=200) | |
|
67 | result = json.loads(response.body) | |
|
68 | values = [suggestion['value'] for suggestion in result['suggestions']] | |
|
69 | assert user_name in values | |
|
70 | ||
|
71 | response = self.app.get( | |
|
72 | route_path('user_autocomplete_data', | |
|
73 | params=dict(user_groups='true', active='1')), | |
|
74 | extra_environ=xhr_header, status=200) | |
|
75 | result = json.loads(response.body) | |
|
76 | values = [suggestion['value'] for suggestion in result['suggestions']] | |
|
77 | assert user_name not in values | |
|
78 | ||
|
79 | def test_returns_groups_when_user_groups_flag_sent( | |
|
80 | self, user_util, xhr_header): | |
|
81 | self.log_user() | |
|
82 | group = user_util.create_user_group(user_groups_active=True) | |
|
83 | group_name = group.users_group_name | |
|
84 | response = self.app.get( | |
|
85 | route_path('user_autocomplete_data', | |
|
86 | params=dict(user_groups='true')), | |
|
87 | extra_environ=xhr_header, status=200) | |
|
88 | result = json.loads(response.body) | |
|
89 | values = [suggestion['value'] for suggestion in result['suggestions']] | |
|
90 | assert group_name in values | |
|
91 | ||
|
92 | @pytest.mark.parametrize('query, count', [ | |
|
93 | ('hello1', 0), | |
|
94 | ('dev', 2), | |
|
95 | ]) | |
|
96 | def test_result_is_limited_when_query_is_sent(self, user_util, xhr_header, | |
|
97 | query, count): | |
|
98 | self.log_user() | |
|
99 | ||
|
100 | user_util._test_name = 'dev-test' | |
|
101 | user_util.create_user() | |
|
102 | ||
|
103 | user_util._test_name = 'dev-group-test' | |
|
104 | user_util.create_user_group() | |
|
105 | ||
|
106 | response = self.app.get( | |
|
107 | route_path('user_autocomplete_data', | |
|
108 | params=dict(user_groups='true', query=query)), | |
|
109 | extra_environ=xhr_header, status=200) | |
|
110 | ||
|
111 | result = json.loads(response.body) | |
|
112 | assert len(result['suggestions']) == count |
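
For readers following the assertions above: route_path() simply maps a route name to its URL and, when params are given, appends an urlencoded query string (urllib.urlencode is the Python 2 spelling; on Python 3 it lives at urllib.parse.urlencode). Illustration only, using the same parameters the tests pass:

    import urllib  # Python 2; urllib.parse on Python 3

    # What the helper above appends when params are given (dict ordering may vary):
    urllib.urlencode(dict(user_groups='true', active='0'))
    # -> 'user_groups=true&active=0', giving '/_users?user_groups=true&active=0'

The JSON body the tests decode has the shape {"suggestions": [{"value": ...}, ...]}; only the 'value' key is asserted on, so any additional per-suggestion fields are irrelevant to these tests.
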
@@ -0,0 +1,117 b''
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
39 | ||
|
40 | import json | |
|
41 | ||
|
42 | import pytest | |
|
43 | ||
|
44 | from rhodecode.tests import TestController | |
|
45 | from rhodecode.tests.fixture import Fixture | |
|
46 | ||
|
47 | ||
|
48 | fixture = Fixture() | |
|
49 | ||
|
50 | ||
|
51 | def route_path(name, params=None, **kwargs): | |
|
52 | import urllib | |
|
53 | ||
|
54 | base_url = { | |
|
55 | 'user_autocomplete_data': '/_users', | |
|
56 | 'user_group_autocomplete_data': '/_user_groups' | |
|
57 | }[name].format(**kwargs) | |
|
58 | ||
|
59 | if params: | |
|
60 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |
|
61 | return base_url | |
|
62 | ||
|
63 | ||
|
64 | class TestUserGroupAutocompleteData(TestController): | |
|
65 | ||
|
66 | def test_returns_list_of_user_groups(self, user_util, xhr_header): | |
|
67 | self.log_user() | |
|
68 | user_group = user_util.create_user_group(active=True) | |
|
69 | user_group_name = user_group.users_group_name | |
|
70 | response = self.app.get( | |
|
71 | route_path('user_group_autocomplete_data'), | |
|
72 | extra_environ=xhr_header, status=200) | |
|
73 | result = json.loads(response.body) | |
|
74 | values = [suggestion['value'] for suggestion in result['suggestions']] | |
|
75 | assert user_group_name in values | |
|
76 | ||
|
77 | def test_returns_inactive_user_groups_when_active_flag_sent( | |
|
78 | self, user_util, xhr_header): | |
|
79 | self.log_user() | |
|
80 | user_group = user_util.create_user_group(active=False) | |
|
81 | user_group_name = user_group.users_group_name | |
|
82 | ||
|
83 | response = self.app.get( | |
|
84 | route_path('user_group_autocomplete_data', | |
|
85 | params=dict(active='0')), | |
|
86 | extra_environ=xhr_header, status=200) | |
|
87 | result = json.loads(response.body) | |
|
88 | values = [suggestion['value'] for suggestion in result['suggestions']] | |
|
89 | assert user_group_name in values | |
|
90 | ||
|
91 | response = self.app.get( | |
|
92 | route_path('user_group_autocomplete_data', | |
|
93 | params=dict(active='1')), | |
|
94 | extra_environ=xhr_header, status=200) | |
|
95 | result = json.loads(response.body) | |
|
96 | values = [suggestion['value'] for suggestion in result['suggestions']] | |
|
97 | assert user_group_name not in values | |
|
98 | ||
|
99 | @pytest.mark.parametrize('query, count', [ | |
|
100 | ('hello1', 0), | |
|
101 | ('dev', 1), | |
|
102 | ]) | |
|
103 | def test_result_is_limited_when_query_is_sent(self, user_util, xhr_header, query, count): | |
|
104 | self.log_user() | |
|
105 | ||
|
106 | user_util._test_name = 'dev-test' | |
|
107 | user_util.create_user_group() | |
|
108 | ||
|
109 | response = self.app.get( | |
|
110 | route_path('user_group_autocomplete_data', | |
|
111 | params=dict(user_groups='true', | |
|
112 | query=query)), | |
|
113 | extra_environ=xhr_header, status=200) | |
|
114 | ||
|
115 | result = json.loads(response.body) | |
|
116 | ||
|
117 | assert len(result['suggestions']) == count |
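
Both test classes above rely on an xhr_header fixture because the views (next file in this changeset) are registered with xhr=True, and Pyramid's xhr view predicate only matches requests carrying the X-Requested-With header. The fixture itself is defined elsewhere in the test suite; a hypothetical stand-in, shown only for illustration, would be:

    import pytest

    @pytest.fixture
    def xhr_header():
        # WSGI environ key for the X-Requested-With request header, which is
        # what Pyramid's xhr=True predicate (request.is_xhr) checks.
        return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
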
@@ -0,0 +1,81 b''
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import logging | |
|
22 | ||
|
23 | from pyramid.view import view_config | |
|
24 | ||
|
25 | from rhodecode.apps._base import BaseAppView | |
|
26 | from rhodecode.lib.auth import LoginRequired, NotAnonymous | |
|
27 | from rhodecode.lib.utils2 import str2bool | |
|
28 | from rhodecode.model.repo import RepoModel | |
|
29 | ||
|
30 | log = logging.getLogger(__name__) | |
|
31 | ||
|
32 | ||
|
33 | class HomeView(BaseAppView): | |
|
34 | ||
|
35 | def load_default_context(self): | |
|
36 | c = self._get_local_tmpl_context() | |
|
37 | c.user = c.auth_user.get_instance() | |
|
38 | self._register_global_c(c) | |
|
39 | return c | |
|
40 | ||
|
41 | @LoginRequired() | |
|
42 | @view_config( | |
|
43 | route_name='user_autocomplete_data', request_method='GET', | |
|
44 | renderer='json_ext', xhr=True) | |
|
45 | def user_autocomplete_data(self): | |
|
46 | query = self.request.GET.get('query') | |
|
47 | active = str2bool(self.request.GET.get('active') or True) | |
|
48 | include_groups = str2bool(self.request.GET.get('user_groups')) | |
|
49 | ||
|
50 | log.debug('generating user list, query:%s, active:%s, with_groups:%s', | |
|
51 | query, active, include_groups) | |
|
52 | ||
|
53 | repo_model = RepoModel() | |
|
54 | _users = repo_model.get_users( | |
|
55 | name_contains=query, only_active=active) | |
|
56 | ||
|
57 | if include_groups: | |
|
58 | # extend with user groups | |
|
59 | _user_groups = repo_model.get_user_groups( | |
|
60 | name_contains=query, only_active=active) | |
|
61 | _users = _users + _user_groups | |
|
62 | ||
|
63 | return {'suggestions': _users} | |
|
64 | ||
|
65 | @LoginRequired() | |
|
66 | @NotAnonymous() | |
|
67 | @view_config( | |
|
68 | route_name='user_group_autocomplete_data', request_method='GET', | |
|
69 | renderer='json_ext', xhr=True) | |
|
70 | def user_group_autocomplete_data(self): | |
|
71 | query = self.request.GET.get('query') | |
|
72 | active = str2bool(self.request.GET.get('active') or True) | |
|
73 | log.debug('generating user group list, query:%s, active:%s', | |
|
74 | query, active) | |
|
75 | ||
|
76 | repo_model = RepoModel() | |
|
77 | _user_groups = repo_model.get_user_groups( | |
|
78 | name_contains=query, only_active=active) | |
|
79 | _user_groups = _user_groups | |
|
80 | ||
|
81 | return {'suggestions': _user_groups} |
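
A note on the active flag handling in both views: str2bool(self.request.GET.get('active') or True) defaults to True when the parameter is absent and only flips to False for an explicit falsy value such as '0', which is exactly what the tests in this changeset exercise. A hedged sketch of that logic as a stand-alone helper (hypothetical name; assumes the usual str2bool semantics of rhodecode.lib.utils2):

    from rhodecode.lib.utils2 import str2bool

    def _only_active(params):
        # Mirrors the expression used in the views above.
        return str2bool(params.get('active') or True)

    # _only_active({})               -> True   (default: only active entries)
    # _only_active({'active': '0'})  -> False  (inactive entries included)
    # _only_active({'active': '1'})  -> True
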
@@ -1,512 +1,513 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Pylons middleware initialization |
|
23 | 23 | """ |
|
24 | 24 | import logging |
|
25 | 25 | from collections import OrderedDict |
|
26 | 26 | |
|
27 | 27 | from paste.registry import RegistryManager |
|
28 | 28 | from paste.gzipper import make_gzip_middleware |
|
29 | 29 | from pylons.wsgiapp import PylonsApp |
|
30 | 30 | from pyramid.authorization import ACLAuthorizationPolicy |
|
31 | 31 | from pyramid.config import Configurator |
|
32 | 32 | from pyramid.settings import asbool, aslist |
|
33 | 33 | from pyramid.wsgi import wsgiapp |
|
34 | 34 | from pyramid.httpexceptions import ( |
|
35 | 35 | HTTPException, HTTPError, HTTPInternalServerError, HTTPFound) |
|
36 | 36 | from pyramid.events import ApplicationCreated |
|
37 | 37 | from pyramid.renderers import render_to_response |
|
38 | 38 | from routes.middleware import RoutesMiddleware |
|
39 | 39 | import routes.util |
|
40 | 40 | |
|
41 | 41 | import rhodecode |
|
42 | 42 | from rhodecode.model import meta |
|
43 | 43 | from rhodecode.config import patches |
|
44 | 44 | from rhodecode.config.routing import STATIC_FILE_PREFIX |
|
45 | 45 | from rhodecode.config.environment import ( |
|
46 | 46 | load_environment, load_pyramid_environment) |
|
47 | 47 | from rhodecode.lib.middleware import csrf |
|
48 | 48 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled |
|
49 | 49 | from rhodecode.lib.middleware.error_handling import ( |
|
50 | 50 | PylonsErrorHandlingMiddleware) |
|
51 | 51 | from rhodecode.lib.middleware.https_fixup import HttpsFixup |
|
52 | 52 | from rhodecode.lib.middleware.vcs import VCSMiddleware |
|
53 | 53 | from rhodecode.lib.plugins.utils import register_rhodecode_plugin |
|
54 | 54 | from rhodecode.lib.utils2 import aslist as rhodecode_aslist |
|
55 | 55 | from rhodecode.subscribers import ( |
|
56 | 56 | scan_repositories_if_enabled, write_metadata_if_needed, |
|
57 | 57 | write_js_routes_if_enabled, create_largeobjects_dirs_if_needed) |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | log = logging.getLogger(__name__) |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | # this is used to avoid avoid the route lookup overhead in routesmiddleware |
|
64 | 64 | # for certain routes which won't go to pylons to - eg. static files, debugger |
|
65 | 65 | # it is only needed for the pylons migration and can be removed once complete |
|
66 | 66 | class SkippableRoutesMiddleware(RoutesMiddleware): |
|
67 | 67 | """ Routes middleware that allows you to skip prefixes """ |
|
68 | 68 | |
|
69 | 69 | def __init__(self, *args, **kw): |
|
70 | 70 | self.skip_prefixes = kw.pop('skip_prefixes', []) |
|
71 | 71 | super(SkippableRoutesMiddleware, self).__init__(*args, **kw) |
|
72 | 72 | |
|
73 | 73 | def __call__(self, environ, start_response): |
|
74 | 74 | for prefix in self.skip_prefixes: |
|
75 | 75 | if environ['PATH_INFO'].startswith(prefix): |
|
76 | 76 | # added to avoid the case when a missing /_static route falls |
|
77 | 77 | # through to pylons and causes an exception as pylons is |
|
78 | 78 | # expecting wsgiorg.routing_args to be set in the environ |
|
79 | 79 | # by RoutesMiddleware. |
|
80 | 80 | if 'wsgiorg.routing_args' not in environ: |
|
81 | 81 | environ['wsgiorg.routing_args'] = (None, {}) |
|
82 | 82 | return self.app(environ, start_response) |
|
83 | 83 | |
|
84 | 84 | return super(SkippableRoutesMiddleware, self).__call__( |
|
85 | 85 | environ, start_response) |
|
86 | 86 | |
|
87 | 87 | |
|
88 | 88 | def make_app(global_conf, static_files=True, **app_conf): |
|
89 | 89 | """Create a Pylons WSGI application and return it |
|
90 | 90 | |
|
91 | 91 | ``global_conf`` |
|
92 | 92 | The inherited configuration for this application. Normally from |
|
93 | 93 | the [DEFAULT] section of the Paste ini file. |
|
94 | 94 | |
|
95 | 95 | ``app_conf`` |
|
96 | 96 | The application's local configuration. Normally specified in |
|
97 | 97 | the [app:<name>] section of the Paste ini file (where <name> |
|
98 | 98 | defaults to main). |
|
99 | 99 | |
|
100 | 100 | """ |
|
101 | 101 | # Apply compatibility patches |
|
102 | 102 | patches.kombu_1_5_1_python_2_7_11() |
|
103 | 103 | patches.inspect_getargspec() |
|
104 | 104 | |
|
105 | 105 | # Configure the Pylons environment |
|
106 | 106 | config = load_environment(global_conf, app_conf) |
|
107 | 107 | |
|
108 | 108 | # The Pylons WSGI app |
|
109 | 109 | app = PylonsApp(config=config) |
|
110 | 110 | if rhodecode.is_test: |
|
111 | 111 | app = csrf.CSRFDetector(app) |
|
112 | 112 | |
|
113 | 113 | expected_origin = config.get('expected_origin') |
|
114 | 114 | if expected_origin: |
|
115 | 115 | # The API can be accessed from other Origins. |
|
116 | 116 | app = csrf.OriginChecker(app, expected_origin, |
|
117 | 117 | skip_urls=[routes.util.url_for('api')]) |
|
118 | 118 | |
|
119 | 119 | # Establish the Registry for this application |
|
120 | 120 | app = RegistryManager(app) |
|
121 | 121 | |
|
122 | 122 | app.config = config |
|
123 | 123 | |
|
124 | 124 | return app |
|
125 | 125 | |
|
126 | 126 | |
|
127 | 127 | def make_pyramid_app(global_config, **settings): |
|
128 | 128 | """ |
|
129 | 129 | Constructs the WSGI application based on Pyramid and wraps the Pylons based |
|
130 | 130 | application. |
|
131 | 131 | |
|
132 | 132 | Specials: |
|
133 | 133 | |
|
134 | 134 | * We migrate from Pylons to Pyramid. While doing this, we keep both |
|
135 | 135 | frameworks functional. This involves moving some WSGI middlewares around |
|
136 | 136 | and providing access to some data internals, so that the old code is |
|
137 | 137 | still functional. |
|
138 | 138 | |
|
139 | 139 | * The application can also be integrated like a plugin via the call to |
|
140 | 140 | `includeme`. This is accompanied with the other utility functions which |
|
141 | 141 | are called. Changing this should be done with great care to not break |
|
142 | 142 | cases when these fragments are assembled from another place. |
|
143 | 143 | |
|
144 | 144 | """ |
|
145 | 145 | # The edition string should be available in pylons too, so we add it here |
|
146 | 146 | # before copying the settings. |
|
147 | 147 | settings.setdefault('rhodecode.edition', 'Community Edition') |
|
148 | 148 | |
|
149 | 149 | # As long as our Pylons application does expect "unprepared" settings, make |
|
150 | 150 | # sure that we keep an unmodified copy. This avoids unintentional change of |
|
151 | 151 | # behavior in the old application. |
|
152 | 152 | settings_pylons = settings.copy() |
|
153 | 153 | |
|
154 | 154 | sanitize_settings_and_apply_defaults(settings) |
|
155 | 155 | config = Configurator(settings=settings) |
|
156 | 156 | add_pylons_compat_data(config.registry, global_config, settings_pylons) |
|
157 | 157 | |
|
158 | 158 | load_pyramid_environment(global_config, settings) |
|
159 | 159 | |
|
160 | 160 | includeme_first(config) |
|
161 | 161 | includeme(config) |
|
162 | 162 | pyramid_app = config.make_wsgi_app() |
|
163 | 163 | pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config) |
|
164 | 164 | pyramid_app.config = config |
|
165 | 165 | |
|
166 | 166 | # creating the app uses a connection - return it after we are done |
|
167 | 167 | meta.Session.remove() |
|
168 | 168 | |
|
169 | 169 | return pyramid_app |
|
170 | 170 | |
|
171 | 171 | |
|
172 | 172 | def make_not_found_view(config): |
|
173 | 173 | """ |
|
174 | 174 | This creates the view which should be registered as not-found-view to |
|
175 | 175 | pyramid. Basically it consists of the old pylons app, converted to a view. |
|
176 | 176 | Additionally it is wrapped by some other middlewares. |
|
177 | 177 | """ |
|
178 | 178 | settings = config.registry.settings |
|
179 | 179 | vcs_server_enabled = settings['vcs.server.enable'] |
|
180 | 180 | |
|
181 | 181 | # Make pylons app from unprepared settings. |
|
182 | 182 | pylons_app = make_app( |
|
183 | 183 | config.registry._pylons_compat_global_config, |
|
184 | 184 | **config.registry._pylons_compat_settings) |
|
185 | 185 | config.registry._pylons_compat_config = pylons_app.config |
|
186 | 186 | |
|
187 | 187 | # Appenlight monitoring. |
|
188 | 188 | pylons_app, appenlight_client = wrap_in_appenlight_if_enabled( |
|
189 | 189 | pylons_app, settings) |
|
190 | 190 | |
|
191 | 191 | # The pylons app is executed inside of the pyramid 404 exception handler. |
|
192 | 192 | # Exceptions which are raised inside of it are not handled by pyramid |
|
193 | 193 | # again. Therefore we add a middleware that invokes the error handler in |
|
194 | 194 | # case of an exception or error response. This way we return proper error |
|
195 | 195 | # HTML pages in case of an error. |
|
196 | 196 | reraise = (settings.get('debugtoolbar.enabled', False) or |
|
197 | 197 | rhodecode.disable_error_handler) |
|
198 | 198 | pylons_app = PylonsErrorHandlingMiddleware( |
|
199 | 199 | pylons_app, error_handler, reraise) |
|
200 | 200 | |
|
201 | 201 | # The VCSMiddleware shall operate like a fallback if pyramid doesn't find a |
|
202 | 202 | # view to handle the request. Therefore it is wrapped around the pylons |
|
203 | 203 | # app. It has to be outside of the error handling otherwise error responses |
|
204 | 204 | # from the vcsserver are converted to HTML error pages. This confuses the |
|
205 | 205 | # command line tools and the user won't get a meaningful error message. |
|
206 | 206 | if vcs_server_enabled: |
|
207 | 207 | pylons_app = VCSMiddleware( |
|
208 | 208 | pylons_app, settings, appenlight_client, registry=config.registry) |
|
209 | 209 | |
|
210 | 210 | # Convert WSGI app to pyramid view and return it. |
|
211 | 211 | return wsgiapp(pylons_app) |
|
212 | 212 | |
|
213 | 213 | |
|
214 | 214 | def add_pylons_compat_data(registry, global_config, settings): |
|
215 | 215 | """ |
|
216 | 216 | Attach data to the registry to support the Pylons integration. |
|
217 | 217 | """ |
|
218 | 218 | registry._pylons_compat_global_config = global_config |
|
219 | 219 | registry._pylons_compat_settings = settings |
|
220 | 220 | |
|
221 | 221 | |
|
222 | 222 | def error_handler(exception, request): |
|
223 | 223 | import rhodecode |
|
224 | 224 | from rhodecode.lib.utils2 import AttributeDict |
|
225 | 225 | |
|
226 | 226 | rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode' |
|
227 | 227 | |
|
228 | 228 | base_response = HTTPInternalServerError() |
|
229 | 229 | # prefer original exception for the response since it may have headers set |
|
230 | 230 | if isinstance(exception, HTTPException): |
|
231 | 231 | base_response = exception |
|
232 | 232 | |
|
233 | 233 | def is_http_error(response): |
|
234 | 234 | # error which should have traceback |
|
235 | 235 | return response.status_code > 499 |
|
236 | 236 | |
|
237 | 237 | if is_http_error(base_response): |
|
238 | 238 | log.exception( |
|
239 | 239 | 'error occurred handling this request for path: %s', request.path) |
|
240 | 240 | |
|
241 | 241 | c = AttributeDict() |
|
242 | 242 | c.error_message = base_response.status |
|
243 | 243 | c.error_explanation = base_response.explanation or str(base_response) |
|
244 | 244 | c.visual = AttributeDict() |
|
245 | 245 | |
|
246 | 246 | c.visual.rhodecode_support_url = ( |
|
247 | 247 | request.registry.settings.get('rhodecode_support_url') or |
|
248 | 248 | request.route_url('rhodecode_support') |
|
249 | 249 | ) |
|
250 | 250 | c.redirect_time = 0 |
|
251 | 251 | c.rhodecode_name = rhodecode_title |
|
252 | 252 | if not c.rhodecode_name: |
|
253 | 253 | c.rhodecode_name = 'Rhodecode' |
|
254 | 254 | |
|
255 | 255 | c.causes = [] |
|
256 | 256 | if hasattr(base_response, 'causes'): |
|
257 | 257 | c.causes = base_response.causes |
|
258 | 258 | |
|
259 | 259 | response = render_to_response( |
|
260 | 260 | '/errors/error_document.mako', {'c': c}, request=request, |
|
261 | 261 | response=base_response) |
|
262 | 262 | |
|
263 | 263 | return response |
|
264 | 264 | |
|
265 | 265 | |
|
266 | 266 | def includeme(config): |
|
267 | 267 | settings = config.registry.settings |
|
268 | 268 | |
|
269 | 269 | # plugin information |
|
270 | 270 | config.registry.rhodecode_plugins = OrderedDict() |
|
271 | 271 | |
|
272 | 272 | config.add_directive( |
|
273 | 273 | 'register_rhodecode_plugin', register_rhodecode_plugin) |
|
274 | 274 | |
|
275 | 275 | if asbool(settings.get('appenlight', 'false')): |
|
276 | 276 | config.include('appenlight_client.ext.pyramid_tween') |
|
277 | 277 | |
|
278 | 278 | # Includes which are required. The application would fail without them. |
|
279 | 279 | config.include('pyramid_mako') |
|
280 | 280 | config.include('pyramid_beaker') |
|
281 | 281 | |
|
282 | 282 | config.include('rhodecode.authentication') |
|
283 | 283 | config.include('rhodecode.integrations') |
|
284 | 284 | |
|
285 | 285 | # apps |
|
286 | 286 | config.include('rhodecode.apps._base') |
|
287 | 287 | |
|
288 | 288 | config.include('rhodecode.apps.admin') |
|
289 | 289 | config.include('rhodecode.apps.channelstream') |
|
290 | 290 | config.include('rhodecode.apps.login') |
|
291 | config.include('rhodecode.apps.home') | |
|
291 | 292 | config.include('rhodecode.apps.repository') |
|
292 | 293 | config.include('rhodecode.apps.user_profile') |
|
293 | 294 | config.include('rhodecode.apps.my_account') |
|
294 | 295 | config.include('rhodecode.apps.svn_support') |
|
295 | 296 | |
|
296 | 297 | config.include('rhodecode.tweens') |
|
297 | 298 | config.include('rhodecode.api') |
|
298 | 299 | |
|
299 | 300 | config.add_route( |
|
300 | 301 | 'rhodecode_support', 'https://rhodecode.com/help/', static=True) |
|
301 | 302 | |
|
302 | 303 | config.add_translation_dirs('rhodecode:i18n/') |
|
303 | 304 | settings['default_locale_name'] = settings.get('lang', 'en') |
|
304 | 305 | |
|
305 | 306 | # Add subscribers. |
|
306 | 307 | config.add_subscriber(create_largeobjects_dirs_if_needed, ApplicationCreated) |
|
307 | 308 | config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated) |
|
308 | 309 | config.add_subscriber(write_metadata_if_needed, ApplicationCreated) |
|
309 | 310 | config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated) |
|
310 | 311 | |
|
311 | 312 | # Set the authorization policy. |
|
312 | 313 | authz_policy = ACLAuthorizationPolicy() |
|
313 | 314 | config.set_authorization_policy(authz_policy) |
|
314 | 315 | |
|
315 | 316 | # Set the default renderer for HTML templates to mako. |
|
316 | 317 | config.add_mako_renderer('.html') |
|
317 | 318 | |
|
318 | 319 | config.add_renderer( |
|
319 | 320 | name='json_ext', |
|
320 | 321 | factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json') |
|
321 | 322 | |
|
322 | 323 | # include RhodeCode plugins |
|
323 | 324 | includes = aslist(settings.get('rhodecode.includes', [])) |
|
324 | 325 | for inc in includes: |
|
325 | 326 | config.include(inc) |
|
326 | 327 | |
|
327 | 328 | # This is the glue which allows us to migrate in chunks. By registering the |
|
328 | 329 | # pylons based application as the "Not Found" view in Pyramid, we will |
|
329 | 330 | # fallback to the old application each time the new one does not yet know |
|
330 | 331 | # how to handle a request. |
|
331 | 332 | config.add_notfound_view(make_not_found_view(config)) |
|
332 | 333 | |
|
333 | 334 | if not settings.get('debugtoolbar.enabled', False): |
|
334 | 335 | # if no toolbar, then any exception gets caught and rendered |
|
335 | 336 | config.add_view(error_handler, context=Exception) |
|
336 | 337 | |
|
337 | 338 | config.add_view(error_handler, context=HTTPError) |
|
338 | 339 | |
|
339 | 340 | |
|
340 | 341 | def includeme_first(config): |
|
341 | 342 | # redirect automatic browser favicon.ico requests to correct place |
|
342 | 343 | def favicon_redirect(context, request): |
|
343 | 344 | return HTTPFound( |
|
344 | 345 | request.static_path('rhodecode:public/images/favicon.ico')) |
|
345 | 346 | |
|
346 | 347 | config.add_view(favicon_redirect, route_name='favicon') |
|
347 | 348 | config.add_route('favicon', '/favicon.ico') |
|
348 | 349 | |
|
349 | 350 | def robots_redirect(context, request): |
|
350 | 351 | return HTTPFound( |
|
351 | 352 | request.static_path('rhodecode:public/robots.txt')) |
|
352 | 353 | |
|
353 | 354 | config.add_view(robots_redirect, route_name='robots') |
|
354 | 355 | config.add_route('robots', '/robots.txt') |
|
355 | 356 | |
|
356 | 357 | config.add_static_view( |
|
357 | 358 | '_static/deform', 'deform:static') |
|
358 | 359 | config.add_static_view( |
|
359 | 360 | '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24) |
|
360 | 361 | |
|
361 | 362 | |
|
362 | 363 | def wrap_app_in_wsgi_middlewares(pyramid_app, config): |
|
363 | 364 | """ |
|
364 | 365 | Apply outer WSGI middlewares around the application. |
|
365 | 366 | |
|
366 | 367 | Part of this has been moved up from the Pylons layer, so that the |
|
367 | 368 | data is also available if old Pylons code is hit through an already ported |
|
368 | 369 | view. |
|
369 | 370 | """ |
|
370 | 371 | settings = config.registry.settings |
|
371 | 372 | |
|
372 | 373 | # enable https redirects based on HTTP_X_URL_SCHEME set by proxy |
|
373 | 374 | pyramid_app = HttpsFixup(pyramid_app, settings) |
|
374 | 375 | |
|
375 | 376 | # Add RoutesMiddleware to support the pylons compatibility tween during |
|
376 | 377 | # migration to pyramid. |
|
377 | 378 | pyramid_app = SkippableRoutesMiddleware( |
|
378 | 379 | pyramid_app, config.registry._pylons_compat_config['routes.map'], |
|
379 | 380 | skip_prefixes=(STATIC_FILE_PREFIX, '/_debug_toolbar')) |
|
380 | 381 | |
|
381 | 382 | pyramid_app, _ = wrap_in_appenlight_if_enabled(pyramid_app, settings) |
|
382 | 383 | |
|
383 | 384 | if settings['gzip_responses']: |
|
384 | 385 | pyramid_app = make_gzip_middleware( |
|
385 | 386 | pyramid_app, settings, compress_level=1) |
|
386 | 387 | |
|
387 | 388 | # this should be the outer most middleware in the wsgi stack since |
|
388 | 389 | # middleware like Routes make database calls |
|
389 | 390 | def pyramid_app_with_cleanup(environ, start_response): |
|
390 | 391 | try: |
|
391 | 392 | return pyramid_app(environ, start_response) |
|
392 | 393 | finally: |
|
393 | 394 | # Dispose current database session and rollback uncommitted |
|
394 | 395 | # transactions. |
|
395 | 396 | meta.Session.remove() |
|
396 | 397 | |
|
397 | 398 | # In a single threaded mode server, on non sqlite db we should have |
|
398 | 399 | # '0 Current Checked out connections' at the end of a request, |
|
399 | 400 | # if not, then something, somewhere is leaving a connection open |
|
400 | 401 | pool = meta.Base.metadata.bind.engine.pool |
|
401 | 402 | log.debug('sa pool status: %s', pool.status()) |
|
402 | 403 | |
|
403 | 404 | |
|
404 | 405 | return pyramid_app_with_cleanup |
|
405 | 406 | |
|
406 | 407 | |
|
407 | 408 | def sanitize_settings_and_apply_defaults(settings): |
|
408 | 409 | """ |
|
409 | 410 | Applies settings defaults and does all type conversion. |
|
410 | 411 | |
|
411 | 412 | We would move all settings parsing and preparation into this place, so that |
|
412 | 413 | we have only one place left which deals with this part. The remaining parts |
|
413 | 414 | of the application would start to rely fully on well prepared settings. |
|
414 | 415 | |
|
415 | 416 | This piece would later be split up per topic to avoid a big fat monster |
|
416 | 417 | function. |
|
417 | 418 | """ |
|
418 | 419 | |
|
419 | 420 | # Pyramid's mako renderer has to search in the templates folder so that the |
|
420 | 421 | # old templates still work. Ported and new templates are expected to use |
|
421 | 422 | # real asset specifications for the includes. |
|
422 | 423 | mako_directories = settings.setdefault('mako.directories', [ |
|
423 | 424 | # Base templates of the original Pylons application |
|
424 | 425 | 'rhodecode:templates', |
|
425 | 426 | ]) |
|
426 | 427 | log.debug( |
|
427 | 428 | "Using the following Mako template directories: %s", |
|
428 | 429 | mako_directories) |
|
429 | 430 | |
|
430 | 431 | # Default includes, possible to change as a user |
|
431 | 432 | pyramid_includes = settings.setdefault('pyramid.includes', [ |
|
432 | 433 | 'rhodecode.lib.middleware.request_wrapper', |
|
433 | 434 | ]) |
|
434 | 435 | log.debug( |
|
435 | 436 | "Using the following pyramid.includes: %s", |
|
436 | 437 | pyramid_includes) |
|
437 | 438 | |
|
438 | 439 | # TODO: johbo: Re-think this, usually the call to config.include |
|
439 | 440 | # should allow to pass in a prefix. |
|
440 | 441 | settings.setdefault('rhodecode.api.url', '/_admin/api') |
|
441 | 442 | |
|
442 | 443 | # Sanitize generic settings. |
|
443 | 444 | _list_setting(settings, 'default_encoding', 'UTF-8') |
|
444 | 445 | _bool_setting(settings, 'is_test', 'false') |
|
445 | 446 | _bool_setting(settings, 'gzip_responses', 'false') |
|
446 | 447 | |
|
447 | 448 | # Call split out functions that sanitize settings for each topic. |
|
448 | 449 | _sanitize_appenlight_settings(settings) |
|
449 | 450 | _sanitize_vcs_settings(settings) |
|
450 | 451 | |
|
451 | 452 | return settings |
|
452 | 453 | |
|
453 | 454 | |
|
454 | 455 | def _sanitize_appenlight_settings(settings): |
|
455 | 456 | _bool_setting(settings, 'appenlight', 'false') |
|
456 | 457 | |
|
457 | 458 | |
|
458 | 459 | def _sanitize_vcs_settings(settings): |
|
459 | 460 | """ |
|
460 | 461 | Applies settings defaults and does type conversion for all VCS related |
|
461 | 462 | settings. |
|
462 | 463 | """ |
|
463 | 464 | _string_setting(settings, 'vcs.svn.compatible_version', '') |
|
464 | 465 | _string_setting(settings, 'git_rev_filter', '--all') |
|
465 | 466 | _string_setting(settings, 'vcs.hooks.protocol', 'http') |
|
466 | 467 | _string_setting(settings, 'vcs.scm_app_implementation', 'http') |
|
467 | 468 | _string_setting(settings, 'vcs.server', '') |
|
468 | 469 | _string_setting(settings, 'vcs.server.log_level', 'debug') |
|
469 | 470 | _string_setting(settings, 'vcs.server.protocol', 'http') |
|
470 | 471 | _bool_setting(settings, 'startup.import_repos', 'false') |
|
471 | 472 | _bool_setting(settings, 'vcs.hooks.direct_calls', 'false') |
|
472 | 473 | _bool_setting(settings, 'vcs.server.enable', 'true') |
|
473 | 474 | _bool_setting(settings, 'vcs.start_server', 'false') |
|
474 | 475 | _list_setting(settings, 'vcs.backends', 'hg, git, svn') |
|
475 | 476 | _int_setting(settings, 'vcs.connection_timeout', 3600) |
|
476 | 477 | |
|
477 | 478 | # Support legacy values of vcs.scm_app_implementation. Legacy |
|
478 | 479 | # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http' |
|
479 | 480 | # which is now mapped to 'http'. |
|
480 | 481 | scm_app_impl = settings['vcs.scm_app_implementation'] |
|
481 | 482 | if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http': |
|
482 | 483 | settings['vcs.scm_app_implementation'] = 'http' |
|
483 | 484 | |
|
484 | 485 | |
|
485 | 486 | def _int_setting(settings, name, default): |
|
486 | 487 | settings[name] = int(settings.get(name, default)) |
|
487 | 488 | |
|
488 | 489 | |
|
489 | 490 | def _bool_setting(settings, name, default): |
|
490 | 491 | input = settings.get(name, default) |
|
491 | 492 | if isinstance(input, unicode): |
|
492 | 493 | input = input.encode('utf8') |
|
493 | 494 | settings[name] = asbool(input) |
|
494 | 495 | |
|
495 | 496 | |
|
496 | 497 | def _list_setting(settings, name, default): |
|
497 | 498 | raw_value = settings.get(name, default) |
|
498 | 499 | |
|
499 | 500 | old_separator = ',' |
|
500 | 501 | if old_separator in raw_value: |
|
501 | 502 | # If we get a comma separated list, pass it to our own function. |
|
502 | 503 | settings[name] = rhodecode_aslist(raw_value, sep=old_separator) |
|
503 | 504 | else: |
|
504 | 505 | # Otherwise we assume it uses pyramids space/newline separation. |
|
505 | 506 | settings[name] = aslist(raw_value) |
|
506 | 507 | |
|
507 | 508 | |
|
508 | 509 | def _string_setting(settings, name, default, lower=True): |
|
509 | 510 | value = settings.get(name, default) |
|
510 | 511 | if lower: |
|
511 | 512 | value = value.lower() |
|
512 | 513 | settings[name] = value |
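
The only functional change in this file is the new config.include('rhodecode.apps.home') line; everything else is unchanged context showing the Pyramid/Pylons bridge and the settings sanitization helpers. For illustration only (the values below are invented, and rhodecode_aslist is assumed to strip whitespace around comma-separated items), the helpers at the end of the hunk behave roughly like this:

    # Sketch using _bool_setting / _list_setting / _string_setting from above.
    settings = {'gzip_responses': 'true', 'vcs.backends': 'hg, git'}
    _bool_setting(settings, 'gzip_responses', 'false')        # asbool('true') -> True
    _bool_setting(settings, 'is_test', 'false')                # default applied -> False
    _list_setting(settings, 'vcs.backends', 'hg, git, svn')    # -> ['hg', 'git']
    _string_setting(settings, 'vcs.server.protocol', 'HTTP')   # lower-cased -> 'http'
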
@@ -1,1153 +1,1148 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Routes configuration |
|
23 | 23 | |
|
24 | 24 | The more specific and detailed routes should be defined first so they |
|
25 | 25 | may take precedence over the more generic routes. For more information |
|
26 | 26 | refer to the routes manual at http://routes.groovie.org/docs/ |
|
27 | 27 | |
|
28 | 28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py |
|
29 | 29 | and _route_name variable which uses some of stored naming here to do redirects. |
|
30 | 30 | """ |
|
31 | 31 | import os |
|
32 | 32 | import re |
|
33 | 33 | from routes import Mapper |
|
34 | 34 | |
|
35 | 35 | from rhodecode.config import routing_links |
|
36 | 36 | |
|
37 | 37 | # prefix for non repository related links needs to be prefixed with `/` |
|
38 | 38 | ADMIN_PREFIX = '/_admin' |
|
39 | 39 | STATIC_FILE_PREFIX = '/_static' |
|
40 | 40 | |
|
41 | 41 | # Default requirements for URL parts |
|
42 | 42 | URL_NAME_REQUIREMENTS = { |
|
43 | 43 | # group name can have a slash in them, but they must not end with a slash |
|
44 | 44 | 'group_name': r'.*?[^/]', |
|
45 | 45 | 'repo_group_name': r'.*?[^/]', |
|
46 | 46 | # repo names can have a slash in them, but they must not end with a slash |
|
47 | 47 | 'repo_name': r'.*?[^/]', |
|
48 | 48 | # file path eats up everything at the end |
|
49 | 49 | 'f_path': r'.*', |
|
50 | 50 | # reference types |
|
51 | 51 | 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)', |
|
52 | 52 | 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)', |
|
53 | 53 | } |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | def add_route_requirements(route_path, requirements): |
|
57 | 57 | """ |
|
58 | 58 | Adds regex requirements to pyramid routes using a mapping dict |
|
59 | 59 | |
|
60 | 60 | >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'}) |
|
61 | 61 | '/{action}/{id:\d+}' |
|
62 | 62 | |
|
63 | 63 | """ |
|
64 | 64 | for key, regex in requirements.items(): |
|
65 | 65 | route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex)) |
|
66 | 66 | return route_path |
|
67 | 67 | |
|
68 | 68 | |
|
69 | 69 | class JSRoutesMapper(Mapper): |
|
70 | 70 | """ |
|
71 | 71 | Wrapper for routes.Mapper to make pyroutes compatible url definitions |
|
72 | 72 | """ |
|
73 | 73 | _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$') |
|
74 | 74 | _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') |
|
75 | 75 | def __init__(self, *args, **kw): |
|
76 | 76 | super(JSRoutesMapper, self).__init__(*args, **kw) |
|
77 | 77 | self._jsroutes = [] |
|
78 | 78 | |
|
79 | 79 | def connect(self, *args, **kw): |
|
80 | 80 | """ |
|
81 | 81 | Wrapper for connect to take an extra argument jsroute=True |
|
82 | 82 | |
|
83 | 83 | :param jsroute: boolean, if True will add the route to the pyroutes list |
|
84 | 84 | """ |
|
85 | 85 | if kw.pop('jsroute', False): |
|
86 | 86 | if not self._named_route_regex.match(args[0]): |
|
87 | 87 | raise Exception('only named routes can be added to pyroutes') |
|
88 | 88 | self._jsroutes.append(args[0]) |
|
89 | 89 | |
|
90 | 90 | super(JSRoutesMapper, self).connect(*args, **kw) |
|
91 | 91 | |
|
92 | 92 | def _extract_route_information(self, route): |
|
93 | 93 | """ |
|
94 | 94 | Convert a route into tuple(name, path, args), eg: |
|
95 | 95 | ('show_user', '/profile/%(username)s', ['username']) |
|
96 | 96 | """ |
|
97 | 97 | routepath = route.routepath |
|
98 | 98 | def replace(matchobj): |
|
99 | 99 | if matchobj.group(1): |
|
100 | 100 | return "%%(%s)s" % matchobj.group(1).split(':')[0] |
|
101 | 101 | else: |
|
102 | 102 | return "%%(%s)s" % matchobj.group(2) |
|
103 | 103 | |
|
104 | 104 | routepath = self._argument_prog.sub(replace, routepath) |
|
105 | 105 | return ( |
|
106 | 106 | route.name, |
|
107 | 107 | routepath, |
|
108 | 108 | [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) |
|
109 | 109 | for arg in self._argument_prog.findall(route.routepath)] |
|
110 | 110 | ) |
|
111 | 111 | |
|
112 | 112 | def jsroutes(self): |
|
113 | 113 | """ |
|
114 | 114 | Return a list of pyroutes.js compatible routes |
|
115 | 115 | """ |
|
116 | 116 | for route_name in self._jsroutes: |
|
117 | 117 | yield self._extract_route_information(self._routenames[route_name]) |
|
118 | 118 | |
|
119 | 119 | |
|
120 | 120 | def make_map(config): |
|
121 | 121 | """Create, configure and return the routes Mapper""" |
|
122 | 122 | rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'], |
|
123 | 123 | always_scan=config['debug']) |
|
124 | 124 | rmap.minimization = False |
|
125 | 125 | rmap.explicit = False |
|
126 | 126 | |
|
127 | 127 | from rhodecode.lib.utils2 import str2bool |
|
128 | 128 | from rhodecode.model import repo, repo_group |
|
129 | 129 | |
|
130 | 130 | def check_repo(environ, match_dict): |
|
131 | 131 | """ |
|
132 | 132 | check for valid repository for proper 404 handling |
|
133 | 133 | |
|
134 | 134 | :param environ: |
|
135 | 135 | :param match_dict: |
|
136 | 136 | """ |
|
137 | 137 | repo_name = match_dict.get('repo_name') |
|
138 | 138 | |
|
139 | 139 | if match_dict.get('f_path'): |
|
140 | 140 | # fix for multiple initial slashes that causes errors |
|
141 | 141 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') |
|
142 | 142 | repo_model = repo.RepoModel() |
|
143 | 143 | by_name_match = repo_model.get_by_repo_name(repo_name) |
|
144 | 144 | # if we match quickly from database, short circuit the operation, |
|
145 | 145 | # and validate repo based on the type. |
|
146 | 146 | if by_name_match: |
|
147 | 147 | return True |
|
148 | 148 | |
|
149 | 149 | by_id_match = repo_model.get_repo_by_id(repo_name) |
|
150 | 150 | if by_id_match: |
|
151 | 151 | repo_name = by_id_match.repo_name |
|
152 | 152 | match_dict['repo_name'] = repo_name |
|
153 | 153 | return True |
|
154 | 154 | |
|
155 | 155 | return False |
|
156 | 156 | |
|
157 | 157 | def check_group(environ, match_dict): |
|
158 | 158 | """ |
|
159 | 159 | check for valid repository group path for proper 404 handling |
|
160 | 160 | |
|
161 | 161 | :param environ: |
|
162 | 162 | :param match_dict: |
|
163 | 163 | """ |
|
164 | 164 | repo_group_name = match_dict.get('group_name') |
|
165 | 165 | repo_group_model = repo_group.RepoGroupModel() |
|
166 | 166 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) |
|
167 | 167 | if by_name_match: |
|
168 | 168 | return True |
|
169 | 169 | |
|
170 | 170 | return False |
|
171 | 171 | |
|
172 | 172 | def check_user_group(environ, match_dict): |
|
173 | 173 | """ |
|
174 | 174 | check for valid user group for proper 404 handling |
|
175 | 175 | |
|
176 | 176 | :param environ: |
|
177 | 177 | :param match_dict: |
|
178 | 178 | """ |
|
179 | 179 | return True |
|
180 | 180 | |
|
181 | 181 | def check_int(environ, match_dict): |
|
182 | 182 | return match_dict.get('id').isdigit() |
|
183 | 183 | |
|
184 | 184 | |
|
185 | 185 | #========================================================================== |
|
186 | 186 | # CUSTOM ROUTES HERE |
|
187 | 187 | #========================================================================== |
|
188 | 188 | |
|
189 | 189 | # MAIN PAGE |
|
190 | 190 | rmap.connect('home', '/', controller='home', action='index', jsroute=True) |
|
191 | 191 | rmap.connect('goto_switcher_data', '/_goto_data', controller='home', |
|
192 | 192 | action='goto_switcher_data') |
|
193 | 193 | rmap.connect('repo_list_data', '/_repos', controller='home', |
|
194 | 194 | action='repo_list_data') |
|
195 | 195 | |
|
196 | rmap.connect('user_autocomplete_data', '/_users', controller='home', | |
|
197 | action='user_autocomplete_data', jsroute=True) | |
|
198 | rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home', | |
|
199 | action='user_group_autocomplete_data', jsroute=True) | |
|
200 | ||
|
201 | 196 | # TODO: johbo: Static links, to be replaced by our redirection mechanism |
|
202 | 197 | rmap.connect('rst_help', |
|
203 | 198 | 'http://docutils.sourceforge.net/docs/user/rst/quickref.html', |
|
204 | 199 | _static=True) |
|
205 | 200 | rmap.connect('markdown_help', |
|
206 | 201 | 'http://daringfireball.net/projects/markdown/syntax', |
|
207 | 202 | _static=True) |
|
208 | 203 | rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True) |
|
209 | 204 | rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True) |
|
210 | 205 | rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True) |
|
211 | 206 | # TODO: anderson - making this a static link since redirect won't play |
|
212 | 207 | # nice with POST requests |
|
213 | 208 | rmap.connect('enterprise_license_convert_from_old', |
|
214 | 209 | 'https://rhodecode.com/u/license-upgrade', |
|
215 | 210 | _static=True) |
|
216 | 211 | |
|
217 | 212 | routing_links.connect_redirection_links(rmap) |
|
218 | 213 | |
|
219 | 214 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') |
|
220 | 215 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') |
|
221 | 216 | |
|
222 | 217 | # ADMIN REPOSITORY ROUTES |
|
223 | 218 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
224 | 219 | controller='admin/repos') as m: |
|
225 | 220 | m.connect('repos', '/repos', |
|
226 | 221 | action='create', conditions={'method': ['POST']}) |
|
227 | 222 | m.connect('repos', '/repos', |
|
228 | 223 | action='index', conditions={'method': ['GET']}) |
|
229 | 224 | m.connect('new_repo', '/create_repository', jsroute=True, |
|
230 | 225 | action='create_repository', conditions={'method': ['GET']}) |
|
231 | 226 | m.connect('/repos/{repo_name}', |
|
232 | 227 | action='update', conditions={'method': ['PUT'], |
|
233 | 228 | 'function': check_repo}, |
|
234 | 229 | requirements=URL_NAME_REQUIREMENTS) |
|
235 | 230 | m.connect('delete_repo', '/repos/{repo_name}', |
|
236 | 231 | action='delete', conditions={'method': ['DELETE']}, |
|
237 | 232 | requirements=URL_NAME_REQUIREMENTS) |
|
238 | 233 | m.connect('repo', '/repos/{repo_name}', |
|
239 | 234 | action='show', conditions={'method': ['GET'], |
|
240 | 235 | 'function': check_repo}, |
|
241 | 236 | requirements=URL_NAME_REQUIREMENTS) |
|
242 | 237 | |
|
243 | 238 | # ADMIN REPOSITORY GROUPS ROUTES |
|
244 | 239 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
245 | 240 | controller='admin/repo_groups') as m: |
|
246 | 241 | m.connect('repo_groups', '/repo_groups', |
|
247 | 242 | action='create', conditions={'method': ['POST']}) |
|
248 | 243 | m.connect('repo_groups', '/repo_groups', |
|
249 | 244 | action='index', conditions={'method': ['GET']}) |
|
250 | 245 | m.connect('new_repo_group', '/repo_groups/new', |
|
251 | 246 | action='new', conditions={'method': ['GET']}) |
|
252 | 247 | m.connect('update_repo_group', '/repo_groups/{group_name}', |
|
253 | 248 | action='update', conditions={'method': ['PUT'], |
|
254 | 249 | 'function': check_group}, |
|
255 | 250 | requirements=URL_NAME_REQUIREMENTS) |
|
256 | 251 | |
|
257 | 252 | # EXTRAS REPO GROUP ROUTES |
|
258 | 253 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
259 | 254 | action='edit', |
|
260 | 255 | conditions={'method': ['GET'], 'function': check_group}, |
|
261 | 256 | requirements=URL_NAME_REQUIREMENTS) |
|
262 | 257 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
263 | 258 | action='edit', |
|
264 | 259 | conditions={'method': ['PUT'], 'function': check_group}, |
|
265 | 260 | requirements=URL_NAME_REQUIREMENTS) |
|
266 | 261 | |
|
267 | 262 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
268 | 263 | action='edit_repo_group_advanced', |
|
269 | 264 | conditions={'method': ['GET'], 'function': check_group}, |
|
270 | 265 | requirements=URL_NAME_REQUIREMENTS) |
|
271 | 266 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
272 | 267 | action='edit_repo_group_advanced', |
|
273 | 268 | conditions={'method': ['PUT'], 'function': check_group}, |
|
274 | 269 | requirements=URL_NAME_REQUIREMENTS) |
|
275 | 270 | |
|
276 | 271 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
277 | 272 | action='edit_repo_group_perms', |
|
278 | 273 | conditions={'method': ['GET'], 'function': check_group}, |
|
279 | 274 | requirements=URL_NAME_REQUIREMENTS) |
|
280 | 275 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
281 | 276 | action='update_perms', |
|
282 | 277 | conditions={'method': ['PUT'], 'function': check_group}, |
|
283 | 278 | requirements=URL_NAME_REQUIREMENTS) |
|
284 | 279 | |
|
285 | 280 | m.connect('delete_repo_group', '/repo_groups/{group_name}', |
|
286 | 281 | action='delete', conditions={'method': ['DELETE'], |
|
287 | 282 | 'function': check_group}, |
|
288 | 283 | requirements=URL_NAME_REQUIREMENTS) |
|
289 | 284 | |
|
290 | 285 | # ADMIN USER ROUTES |
|
291 | 286 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
292 | 287 | controller='admin/users') as m: |
|
293 | 288 | m.connect('users', '/users', |
|
294 | 289 | action='create', conditions={'method': ['POST']}) |
|
295 | 290 | m.connect('new_user', '/users/new', |
|
296 | 291 | action='new', conditions={'method': ['GET']}) |
|
297 | 292 | m.connect('update_user', '/users/{user_id}', |
|
298 | 293 | action='update', conditions={'method': ['PUT']}) |
|
299 | 294 | m.connect('delete_user', '/users/{user_id}', |
|
300 | 295 | action='delete', conditions={'method': ['DELETE']}) |
|
301 | 296 | m.connect('edit_user', '/users/{user_id}/edit', |
|
302 | 297 | action='edit', conditions={'method': ['GET']}, jsroute=True) |
|
303 | 298 | m.connect('user', '/users/{user_id}', |
|
304 | 299 | action='show', conditions={'method': ['GET']}) |
|
305 | 300 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', |
|
306 | 301 | action='reset_password', conditions={'method': ['POST']}) |
|
307 | 302 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', |
|
308 | 303 | action='create_personal_repo_group', conditions={'method': ['POST']}) |
|
309 | 304 | |
|
310 | 305 | # EXTRAS USER ROUTES |
|
311 | 306 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
312 | 307 | action='edit_advanced', conditions={'method': ['GET']}) |
|
313 | 308 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
314 | 309 | action='update_advanced', conditions={'method': ['PUT']}) |
|
315 | 310 | |
|
316 | 311 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
317 | 312 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
318 | 313 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
319 | 314 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
320 | 315 | |
|
321 | 316 | m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary', |
|
322 | 317 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
323 | 318 | |
|
324 | 319 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
325 | 320 | action='edit_emails', conditions={'method': ['GET']}) |
|
326 | 321 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
327 | 322 | action='add_email', conditions={'method': ['PUT']}) |
|
328 | 323 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
329 | 324 | action='delete_email', conditions={'method': ['DELETE']}) |
|
330 | 325 | |
|
331 | 326 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
332 | 327 | action='edit_ips', conditions={'method': ['GET']}) |
|
333 | 328 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
334 | 329 | action='add_ip', conditions={'method': ['PUT']}) |
|
335 | 330 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
336 | 331 | action='delete_ip', conditions={'method': ['DELETE']}) |
|
337 | 332 | |
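
The admin user routes above are registered through rmap.submapper(path_prefix=ADMIN_PREFIX, controller='admin/users'), so every pattern in the block is mounted under the admin prefix and bound to the same controller. As a rough standalone sketch (not part of this changeset; the '/_admin' prefix value and the printed URL are assumptions), the plain routes package behaves the same way:

    from routes import Mapper

    ADMIN_PREFIX = '/_admin'  # assumed value; the real constant is defined elsewhere

    rmap = Mapper()
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/users') as m:
        # each connect() inherits the prefix and the controller from the submapper
        m.connect('users', '/users',
                  action='create', conditions={'method': ['POST']})
        m.connect('edit_user', '/users/{user_id}/edit',
                  action='edit', conditions={'method': ['GET']})

    # URL generation picks the prefix up automatically
    print(rmap.generate(controller='admin/users', action='edit', user_id=2))
    # expected to print something like '/_admin/users/2/edit'
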
|
338 | 333 | # ADMIN USER GROUPS REST ROUTES |
|
339 | 334 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
340 | 335 | controller='admin/user_groups') as m: |
|
341 | 336 | m.connect('users_groups', '/user_groups', |
|
342 | 337 | action='create', conditions={'method': ['POST']}) |
|
343 | 338 | m.connect('users_groups', '/user_groups', |
|
344 | 339 | action='index', conditions={'method': ['GET']}) |
|
345 | 340 | m.connect('new_users_group', '/user_groups/new', |
|
346 | 341 | action='new', conditions={'method': ['GET']}) |
|
347 | 342 | m.connect('update_users_group', '/user_groups/{user_group_id}', |
|
348 | 343 | action='update', conditions={'method': ['PUT']}) |
|
349 | 344 | m.connect('delete_users_group', '/user_groups/{user_group_id}', |
|
350 | 345 | action='delete', conditions={'method': ['DELETE']}) |
|
351 | 346 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', |
|
352 | 347 | action='edit', conditions={'method': ['GET']}, |
|
353 | 348 | function=check_user_group) |
|
354 | 349 | |
|
355 | 350 | # EXTRAS USER GROUP ROUTES |
|
356 | 351 | m.connect('edit_user_group_global_perms', |
|
357 | 352 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
358 | 353 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
359 | 354 | m.connect('edit_user_group_global_perms', |
|
360 | 355 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
361 | 356 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
362 | 357 | m.connect('edit_user_group_perms_summary', |
|
363 | 358 | '/user_groups/{user_group_id}/edit/permissions_summary', |
|
364 | 359 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
365 | 360 | |
|
366 | 361 | m.connect('edit_user_group_perms', |
|
367 | 362 | '/user_groups/{user_group_id}/edit/permissions', |
|
368 | 363 | action='edit_perms', conditions={'method': ['GET']}) |
|
369 | 364 | m.connect('edit_user_group_perms', |
|
370 | 365 | '/user_groups/{user_group_id}/edit/permissions', |
|
371 | 366 | action='update_perms', conditions={'method': ['PUT']}) |
|
372 | 367 | |
|
373 | 368 | m.connect('edit_user_group_advanced', |
|
374 | 369 | '/user_groups/{user_group_id}/edit/advanced', |
|
375 | 370 | action='edit_advanced', conditions={'method': ['GET']}) |
|
376 | 371 | |
|
377 | 372 | m.connect('edit_user_group_advanced_sync', |
|
378 | 373 | '/user_groups/{user_group_id}/edit/advanced/sync', |
|
379 | 374 | action='edit_advanced_set_synchronization', conditions={'method': ['POST']}) |
|
380 | 375 | |
|
381 | 376 | m.connect('edit_user_group_members', |
|
382 | 377 | '/user_groups/{user_group_id}/edit/members', jsroute=True, |
|
383 | 378 | action='user_group_members', conditions={'method': ['GET']}) |
|
384 | 379 | |
|
385 | 380 | # ADMIN PERMISSIONS ROUTES |
|
386 | 381 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
387 | 382 | controller='admin/permissions') as m: |
|
388 | 383 | m.connect('admin_permissions_application', '/permissions/application', |
|
389 | 384 | action='permission_application_update', conditions={'method': ['POST']}) |
|
390 | 385 | m.connect('admin_permissions_application', '/permissions/application', |
|
391 | 386 | action='permission_application', conditions={'method': ['GET']}) |
|
392 | 387 | |
|
393 | 388 | m.connect('admin_permissions_global', '/permissions/global', |
|
394 | 389 | action='permission_global_update', conditions={'method': ['POST']}) |
|
395 | 390 | m.connect('admin_permissions_global', '/permissions/global', |
|
396 | 391 | action='permission_global', conditions={'method': ['GET']}) |
|
397 | 392 | |
|
398 | 393 | m.connect('admin_permissions_object', '/permissions/object', |
|
399 | 394 | action='permission_objects_update', conditions={'method': ['POST']}) |
|
400 | 395 | m.connect('admin_permissions_object', '/permissions/object', |
|
401 | 396 | action='permission_objects', conditions={'method': ['GET']}) |
|
402 | 397 | |
|
403 | 398 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
404 | 399 | action='permission_ips', conditions={'method': ['POST']}) |
|
405 | 400 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
406 | 401 | action='permission_ips', conditions={'method': ['GET']}) |
|
407 | 402 | |
|
408 | 403 | m.connect('admin_permissions_overview', '/permissions/overview', |
|
409 | 404 | action='permission_perms', conditions={'method': ['GET']}) |
|
410 | 405 | |
|
411 | 406 | # ADMIN DEFAULTS REST ROUTES |
|
412 | 407 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
413 | 408 | controller='admin/defaults') as m: |
|
414 | 409 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
415 | 410 | action='update_repository_defaults', conditions={'method': ['POST']}) |
|
416 | 411 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
417 | 412 | action='index', conditions={'method': ['GET']}) |
|
418 | 413 | |
|
419 | 414 | # ADMIN DEBUG STYLE ROUTES |
|
420 | 415 | if str2bool(config.get('debug_style')): |
|
421 | 416 | with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style', |
|
422 | 417 | controller='debug_style') as m: |
|
423 | 418 | m.connect('debug_style_home', '', |
|
424 | 419 | action='index', conditions={'method': ['GET']}) |
|
425 | 420 | m.connect('debug_style_template', '/t/{t_path}', |
|
426 | 421 | action='template', conditions={'method': ['GET']}) |
|
427 | 422 | |
|
428 | 423 | # ADMIN SETTINGS ROUTES |
|
429 | 424 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
430 | 425 | controller='admin/settings') as m: |
|
431 | 426 | |
|
432 | 427 | # default |
|
433 | 428 | m.connect('admin_settings', '/settings', |
|
434 | 429 | action='settings_global_update', |
|
435 | 430 | conditions={'method': ['POST']}) |
|
436 | 431 | m.connect('admin_settings', '/settings', |
|
437 | 432 | action='settings_global', conditions={'method': ['GET']}) |
|
438 | 433 | |
|
439 | 434 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
440 | 435 | action='settings_vcs_update', |
|
441 | 436 | conditions={'method': ['POST']}) |
|
442 | 437 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
443 | 438 | action='settings_vcs', |
|
444 | 439 | conditions={'method': ['GET']}) |
|
445 | 440 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
446 | 441 | action='delete_svn_pattern', |
|
447 | 442 | conditions={'method': ['DELETE']}) |
|
448 | 443 | |
|
449 | 444 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
450 | 445 | action='settings_mapping_update', |
|
451 | 446 | conditions={'method': ['POST']}) |
|
452 | 447 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
453 | 448 | action='settings_mapping', conditions={'method': ['GET']}) |
|
454 | 449 | |
|
455 | 450 | m.connect('admin_settings_global', '/settings/global', |
|
456 | 451 | action='settings_global_update', |
|
457 | 452 | conditions={'method': ['POST']}) |
|
458 | 453 | m.connect('admin_settings_global', '/settings/global', |
|
459 | 454 | action='settings_global', conditions={'method': ['GET']}) |
|
460 | 455 | |
|
461 | 456 | m.connect('admin_settings_visual', '/settings/visual', |
|
462 | 457 | action='settings_visual_update', |
|
463 | 458 | conditions={'method': ['POST']}) |
|
464 | 459 | m.connect('admin_settings_visual', '/settings/visual', |
|
465 | 460 | action='settings_visual', conditions={'method': ['GET']}) |
|
466 | 461 | |
|
467 | 462 | m.connect('admin_settings_issuetracker', |
|
468 | 463 | '/settings/issue-tracker', action='settings_issuetracker', |
|
469 | 464 | conditions={'method': ['GET']}) |
|
470 | 465 | m.connect('admin_settings_issuetracker_save', |
|
471 | 466 | '/settings/issue-tracker/save', |
|
472 | 467 | action='settings_issuetracker_save', |
|
473 | 468 | conditions={'method': ['POST']}) |
|
474 | 469 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', |
|
475 | 470 | action='settings_issuetracker_test', |
|
476 | 471 | conditions={'method': ['POST']}) |
|
477 | 472 | m.connect('admin_issuetracker_delete', |
|
478 | 473 | '/settings/issue-tracker/delete', |
|
479 | 474 | action='settings_issuetracker_delete', |
|
480 | 475 | conditions={'method': ['DELETE']}) |
|
481 | 476 | |
|
482 | 477 | m.connect('admin_settings_email', '/settings/email', |
|
483 | 478 | action='settings_email_update', |
|
484 | 479 | conditions={'method': ['POST']}) |
|
485 | 480 | m.connect('admin_settings_email', '/settings/email', |
|
486 | 481 | action='settings_email', conditions={'method': ['GET']}) |
|
487 | 482 | |
|
488 | 483 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
489 | 484 | action='settings_hooks_update', |
|
490 | 485 | conditions={'method': ['POST', 'DELETE']}) |
|
491 | 486 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
492 | 487 | action='settings_hooks', conditions={'method': ['GET']}) |
|
493 | 488 | |
|
494 | 489 | m.connect('admin_settings_search', '/settings/search', |
|
495 | 490 | action='settings_search', conditions={'method': ['GET']}) |
|
496 | 491 | |
|
497 | 492 | m.connect('admin_settings_supervisor', '/settings/supervisor', |
|
498 | 493 | action='settings_supervisor', conditions={'method': ['GET']}) |
|
499 | 494 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', |
|
500 | 495 | action='settings_supervisor_log', conditions={'method': ['GET']}) |
|
501 | 496 | |
|
502 | 497 | m.connect('admin_settings_labs', '/settings/labs', |
|
503 | 498 | action='settings_labs_update', |
|
504 | 499 | conditions={'method': ['POST']}) |
|
505 | 500 | m.connect('admin_settings_labs', '/settings/labs', |
|
506 | 501 | action='settings_labs', conditions={'method': ['GET']}) |
|
507 | 502 | |
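
Several of the settings routes above, such as the admin_settings pair, connect the same path twice and let conditions={'method': [...]} decide which action handles the request. A minimal standalone sketch of that dispatch with the plain routes Mapper (passing an explicit environ to match() is an assumption about how the method condition is evaluated):

    from routes import Mapper

    rmap = Mapper()
    # same path, two actions, selected by the HTTP method
    rmap.connect('admin_settings', '/settings',
                 controller='admin/settings', action='settings_global_update',
                 conditions={'method': ['POST']})
    rmap.connect('admin_settings', '/settings',
                 controller='admin/settings', action='settings_global',
                 conditions={'method': ['GET']})

    print(rmap.match('/settings', environ={'REQUEST_METHOD': 'GET'}))
    # -> {..., 'action': 'settings_global'}
    print(rmap.match('/settings', environ={'REQUEST_METHOD': 'POST'}))
    # -> {..., 'action': 'settings_global_update'}
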
|
508 | 503 | # ADMIN MY ACCOUNT |
|
509 | 504 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
510 | 505 | controller='admin/my_account') as m: |
|
511 | 506 | |
|
512 | 507 | m.connect('my_account_edit', '/my_account/edit', |
|
513 | 508 | action='my_account_edit', conditions={'method': ['GET']}) |
|
514 | 509 | m.connect('my_account', '/my_account/update', |
|
515 | 510 | action='my_account_update', conditions={'method': ['POST']}) |
|
516 | 511 | |
|
517 | 512 | # NOTE(marcink): this needs to be kept for password force flag to be |
|
518 | 513 | # handled, remove after migration to pyramid |
|
519 | 514 | m.connect('my_account_password', '/my_account/password', |
|
520 | 515 | action='my_account_password', conditions={'method': ['GET']}) |
|
521 | 516 | |
|
522 | 517 | m.connect('my_account_repos', '/my_account/repos', |
|
523 | 518 | action='my_account_repos', conditions={'method': ['GET']}) |
|
524 | 519 | |
|
525 | 520 | m.connect('my_account_watched', '/my_account/watched', |
|
526 | 521 | action='my_account_watched', conditions={'method': ['GET']}) |
|
527 | 522 | |
|
528 | 523 | m.connect('my_account_pullrequests', '/my_account/pull_requests', |
|
529 | 524 | action='my_account_pullrequests', conditions={'method': ['GET']}) |
|
530 | 525 | |
|
531 | 526 | m.connect('my_account_perms', '/my_account/perms', |
|
532 | 527 | action='my_account_perms', conditions={'method': ['GET']}) |
|
533 | 528 | |
|
534 | 529 | m.connect('my_account_emails', '/my_account/emails', |
|
535 | 530 | action='my_account_emails', conditions={'method': ['GET']}) |
|
536 | 531 | m.connect('my_account_emails', '/my_account/emails', |
|
537 | 532 | action='my_account_emails_add', conditions={'method': ['POST']}) |
|
538 | 533 | m.connect('my_account_emails', '/my_account/emails', |
|
539 | 534 | action='my_account_emails_delete', conditions={'method': ['DELETE']}) |
|
540 | 535 | |
|
541 | 536 | m.connect('my_account_notifications', '/my_account/notifications', |
|
542 | 537 | action='my_notifications', |
|
543 | 538 | conditions={'method': ['GET']}) |
|
544 | 539 | m.connect('my_account_notifications_toggle_visibility', |
|
545 | 540 | '/my_account/toggle_visibility', |
|
546 | 541 | action='my_notifications_toggle_visibility', |
|
547 | 542 | conditions={'method': ['POST']}) |
|
548 | 543 | m.connect('my_account_notifications_test_channelstream', |
|
549 | 544 | '/my_account/test_channelstream', |
|
550 | 545 | action='my_account_notifications_test_channelstream', |
|
551 | 546 | conditions={'method': ['POST']}) |
|
552 | 547 | |
|
553 | 548 | # NOTIFICATION REST ROUTES |
|
554 | 549 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
555 | 550 | controller='admin/notifications') as m: |
|
556 | 551 | m.connect('notifications', '/notifications', |
|
557 | 552 | action='index', conditions={'method': ['GET']}) |
|
558 | 553 | m.connect('notifications_mark_all_read', '/notifications/mark_all_read', |
|
559 | 554 | action='mark_all_read', conditions={'method': ['POST']}) |
|
560 | 555 | m.connect('/notifications/{notification_id}', |
|
561 | 556 | action='update', conditions={'method': ['PUT']}) |
|
562 | 557 | m.connect('/notifications/{notification_id}', |
|
563 | 558 | action='delete', conditions={'method': ['DELETE']}) |
|
564 | 559 | m.connect('notification', '/notifications/{notification_id}', |
|
565 | 560 | action='show', conditions={'method': ['GET']}) |
|
566 | 561 | |
|
567 | 562 | # ADMIN GIST |
|
568 | 563 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
569 | 564 | controller='admin/gists') as m: |
|
570 | 565 | m.connect('gists', '/gists', |
|
571 | 566 | action='create', conditions={'method': ['POST']}) |
|
572 | 567 | m.connect('gists', '/gists', jsroute=True, |
|
573 | 568 | action='index', conditions={'method': ['GET']}) |
|
574 | 569 | m.connect('new_gist', '/gists/new', jsroute=True, |
|
575 | 570 | action='new', conditions={'method': ['GET']}) |
|
576 | 571 | |
|
577 | 572 | m.connect('/gists/{gist_id}', |
|
578 | 573 | action='delete', conditions={'method': ['DELETE']}) |
|
579 | 574 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
580 | 575 | action='edit_form', conditions={'method': ['GET']}) |
|
581 | 576 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
582 | 577 | action='edit', conditions={'method': ['POST']}) |
|
583 | 578 | m.connect( |
|
584 | 579 | 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision', |
|
585 | 580 | action='check_revision', conditions={'method': ['GET']}) |
|
586 | 581 | |
|
587 | 582 | m.connect('gist', '/gists/{gist_id}', |
|
588 | 583 | action='show', conditions={'method': ['GET']}) |
|
589 | 584 | m.connect('gist_rev', '/gists/{gist_id}/{revision}', |
|
590 | 585 | revision='tip', |
|
591 | 586 | action='show', conditions={'method': ['GET']}) |
|
592 | 587 | m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}', |
|
593 | 588 | revision='tip', |
|
594 | 589 | action='show', conditions={'method': ['GET']}) |
|
595 | 590 | m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}', |
|
596 | 591 | revision='tip', |
|
597 | 592 | action='show', conditions={'method': ['GET']}, |
|
598 | 593 | requirements=URL_NAME_REQUIREMENTS) |
|
599 | 594 | |
|
600 | 595 | # ADMIN MAIN PAGES |
|
601 | 596 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
602 | 597 | controller='admin/admin') as m: |
|
603 | 598 | m.connect('admin_home', '', action='index') |
|
604 | 599 | m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}', |
|
605 | 600 | action='add_repo') |
|
606 | 601 | m.connect( |
|
607 | 602 | 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}', |
|
608 | 603 | action='pull_requests') |
|
609 | 604 | m.connect( |
|
610 | 605 | 'pull_requests_global_1', '/pull-requests/{pull_request_id:[0-9]+}', |
|
611 | 606 | action='pull_requests') |
|
612 | 607 | m.connect( |
|
613 | 608 | 'pull_requests_global', '/pull-request/{pull_request_id:[0-9]+}', |
|
614 | 609 | action='pull_requests') |
|
615 | 610 | |
|
616 | 611 | # USER JOURNAL |
|
617 | 612 | rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,), |
|
618 | 613 | controller='journal', action='index') |
|
619 | 614 | rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,), |
|
620 | 615 | controller='journal', action='journal_rss') |
|
621 | 616 | rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,), |
|
622 | 617 | controller='journal', action='journal_atom') |
|
623 | 618 | |
|
624 | 619 | rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,), |
|
625 | 620 | controller='journal', action='public_journal') |
|
626 | 621 | |
|
627 | 622 | rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,), |
|
628 | 623 | controller='journal', action='public_journal_rss') |
|
629 | 624 | |
|
630 | 625 | rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,), |
|
631 | 626 | controller='journal', action='public_journal_rss') |
|
632 | 627 | |
|
633 | 628 | rmap.connect('public_journal_atom', |
|
634 | 629 | '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal', |
|
635 | 630 | action='public_journal_atom') |
|
636 | 631 | |
|
637 | 632 | rmap.connect('public_journal_atom_old', |
|
638 | 633 | '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal', |
|
639 | 634 | action='public_journal_atom') |
|
640 | 635 | |
|
641 | 636 | rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,), |
|
642 | 637 | controller='journal', action='toggle_following', jsroute=True, |
|
643 | 638 | conditions={'method': ['POST']}) |
|
644 | 639 | |
|
645 | 640 | # FULL TEXT SEARCH |
|
646 | 641 | rmap.connect('search', '%s/search' % (ADMIN_PREFIX,), |
|
647 | 642 | controller='search') |
|
648 | 643 | rmap.connect('search_repo_home', '/{repo_name}/search', |
|
649 | 644 | controller='search', |
|
650 | 645 | action='index', |
|
651 | 646 | conditions={'function': check_repo}, |
|
652 | 647 | requirements=URL_NAME_REQUIREMENTS) |
|
653 | 648 | |
|
654 | 649 | # FEEDS |
|
655 | 650 | rmap.connect('rss_feed_home', '/{repo_name}/feed/rss', |
|
656 | 651 | controller='feed', action='rss', |
|
657 | 652 | conditions={'function': check_repo}, |
|
658 | 653 | requirements=URL_NAME_REQUIREMENTS) |
|
659 | 654 | |
|
660 | 655 | rmap.connect('atom_feed_home', '/{repo_name}/feed/atom', |
|
661 | 656 | controller='feed', action='atom', |
|
662 | 657 | conditions={'function': check_repo}, |
|
663 | 658 | requirements=URL_NAME_REQUIREMENTS) |
|
664 | 659 | |
|
665 | 660 | #========================================================================== |
|
666 | 661 | # REPOSITORY ROUTES |
|
667 | 662 | #========================================================================== |
|
668 | 663 | |
|
669 | 664 | rmap.connect('repo_creating_home', '/{repo_name}/repo_creating', |
|
670 | 665 | controller='admin/repos', action='repo_creating', |
|
671 | 666 | requirements=URL_NAME_REQUIREMENTS) |
|
672 | 667 | rmap.connect('repo_check_home', '/{repo_name}/crepo_check', |
|
673 | 668 | controller='admin/repos', action='repo_check', |
|
674 | 669 | requirements=URL_NAME_REQUIREMENTS) |
|
675 | 670 | |
|
676 | 671 | rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}', |
|
677 | 672 | controller='summary', action='repo_stats', |
|
678 | 673 | conditions={'function': check_repo}, |
|
679 | 674 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
680 | 675 | |
|
681 | 676 | rmap.connect('repo_refs_data', '/{repo_name}/refs-data', |
|
682 | 677 | controller='summary', action='repo_refs_data', |
|
683 | 678 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
684 | 679 | rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog', |
|
685 | 680 | controller='summary', action='repo_refs_changelog_data', |
|
686 | 681 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
687 | 682 | rmap.connect('repo_default_reviewers_data', '/{repo_name}/default-reviewers', |
|
688 | 683 | controller='summary', action='repo_default_reviewers_data', |
|
689 | 684 | jsroute=True, requirements=URL_NAME_REQUIREMENTS) |
|
690 | 685 | |
|
691 | 686 | rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}', |
|
692 | 687 | controller='changeset', revision='tip', |
|
693 | 688 | conditions={'function': check_repo}, |
|
694 | 689 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
695 | 690 | rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}', |
|
696 | 691 | controller='changeset', revision='tip', action='changeset_children', |
|
697 | 692 | conditions={'function': check_repo}, |
|
698 | 693 | requirements=URL_NAME_REQUIREMENTS) |
|
699 | 694 | rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}', |
|
700 | 695 | controller='changeset', revision='tip', action='changeset_parents', |
|
701 | 696 | conditions={'function': check_repo}, |
|
702 | 697 | requirements=URL_NAME_REQUIREMENTS) |
|
703 | 698 | |
|
704 | 699 | # repo edit options |
|
705 | 700 | rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True, |
|
706 | 701 | controller='admin/repos', action='edit', |
|
707 | 702 | conditions={'method': ['GET'], 'function': check_repo}, |
|
708 | 703 | requirements=URL_NAME_REQUIREMENTS) |
|
709 | 704 | |
|
710 | 705 | rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions', |
|
711 | 706 | jsroute=True, |
|
712 | 707 | controller='admin/repos', action='edit_permissions', |
|
713 | 708 | conditions={'method': ['GET'], 'function': check_repo}, |
|
714 | 709 | requirements=URL_NAME_REQUIREMENTS) |
|
715 | 710 | rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions', |
|
716 | 711 | controller='admin/repos', action='edit_permissions_update', |
|
717 | 712 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
718 | 713 | requirements=URL_NAME_REQUIREMENTS) |
|
719 | 714 | |
|
720 | 715 | rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields', |
|
721 | 716 | controller='admin/repos', action='edit_fields', |
|
722 | 717 | conditions={'method': ['GET'], 'function': check_repo}, |
|
723 | 718 | requirements=URL_NAME_REQUIREMENTS) |
|
724 | 719 | rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new', |
|
725 | 720 | controller='admin/repos', action='create_repo_field', |
|
726 | 721 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
727 | 722 | requirements=URL_NAME_REQUIREMENTS) |
|
728 | 723 | rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}', |
|
729 | 724 | controller='admin/repos', action='delete_repo_field', |
|
730 | 725 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
731 | 726 | requirements=URL_NAME_REQUIREMENTS) |
|
732 | 727 | |
|
733 | 728 | rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced', |
|
734 | 729 | controller='admin/repos', action='edit_advanced', |
|
735 | 730 | conditions={'method': ['GET'], 'function': check_repo}, |
|
736 | 731 | requirements=URL_NAME_REQUIREMENTS) |
|
737 | 732 | |
|
738 | 733 | rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking', |
|
739 | 734 | controller='admin/repos', action='edit_advanced_locking', |
|
740 | 735 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
741 | 736 | requirements=URL_NAME_REQUIREMENTS) |
|
742 | 737 | rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle', |
|
743 | 738 | controller='admin/repos', action='toggle_locking', |
|
744 | 739 | conditions={'method': ['GET'], 'function': check_repo}, |
|
745 | 740 | requirements=URL_NAME_REQUIREMENTS) |
|
746 | 741 | |
|
747 | 742 | rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal', |
|
748 | 743 | controller='admin/repos', action='edit_advanced_journal', |
|
749 | 744 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
750 | 745 | requirements=URL_NAME_REQUIREMENTS) |
|
751 | 746 | |
|
752 | 747 | rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork', |
|
753 | 748 | controller='admin/repos', action='edit_advanced_fork', |
|
754 | 749 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
755 | 750 | requirements=URL_NAME_REQUIREMENTS) |
|
756 | 751 | |
|
757 | 752 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', |
|
758 | 753 | controller='admin/repos', action='edit_caches_form', |
|
759 | 754 | conditions={'method': ['GET'], 'function': check_repo}, |
|
760 | 755 | requirements=URL_NAME_REQUIREMENTS) |
|
761 | 756 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', |
|
762 | 757 | controller='admin/repos', action='edit_caches', |
|
763 | 758 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
764 | 759 | requirements=URL_NAME_REQUIREMENTS) |
|
765 | 760 | |
|
766 | 761 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
767 | 762 | controller='admin/repos', action='edit_remote_form', |
|
768 | 763 | conditions={'method': ['GET'], 'function': check_repo}, |
|
769 | 764 | requirements=URL_NAME_REQUIREMENTS) |
|
770 | 765 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
771 | 766 | controller='admin/repos', action='edit_remote', |
|
772 | 767 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
773 | 768 | requirements=URL_NAME_REQUIREMENTS) |
|
774 | 769 | |
|
775 | 770 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
776 | 771 | controller='admin/repos', action='edit_statistics_form', |
|
777 | 772 | conditions={'method': ['GET'], 'function': check_repo}, |
|
778 | 773 | requirements=URL_NAME_REQUIREMENTS) |
|
779 | 774 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
780 | 775 | controller='admin/repos', action='edit_statistics', |
|
781 | 776 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
782 | 777 | requirements=URL_NAME_REQUIREMENTS) |
|
783 | 778 | rmap.connect('repo_settings_issuetracker', |
|
784 | 779 | '/{repo_name}/settings/issue-tracker', |
|
785 | 780 | controller='admin/repos', action='repo_issuetracker', |
|
786 | 781 | conditions={'method': ['GET'], 'function': check_repo}, |
|
787 | 782 | requirements=URL_NAME_REQUIREMENTS) |
|
788 | 783 | rmap.connect('repo_issuetracker_test', |
|
789 | 784 | '/{repo_name}/settings/issue-tracker/test', |
|
790 | 785 | controller='admin/repos', action='repo_issuetracker_test', |
|
791 | 786 | conditions={'method': ['POST'], 'function': check_repo}, |
|
792 | 787 | requirements=URL_NAME_REQUIREMENTS) |
|
793 | 788 | rmap.connect('repo_issuetracker_delete', |
|
794 | 789 | '/{repo_name}/settings/issue-tracker/delete', |
|
795 | 790 | controller='admin/repos', action='repo_issuetracker_delete', |
|
796 | 791 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
797 | 792 | requirements=URL_NAME_REQUIREMENTS) |
|
798 | 793 | rmap.connect('repo_issuetracker_save', |
|
799 | 794 | '/{repo_name}/settings/issue-tracker/save', |
|
800 | 795 | controller='admin/repos', action='repo_issuetracker_save', |
|
801 | 796 | conditions={'method': ['POST'], 'function': check_repo}, |
|
802 | 797 | requirements=URL_NAME_REQUIREMENTS) |
|
803 | 798 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
804 | 799 | controller='admin/repos', action='repo_settings_vcs_update', |
|
805 | 800 | conditions={'method': ['POST'], 'function': check_repo}, |
|
806 | 801 | requirements=URL_NAME_REQUIREMENTS) |
|
807 | 802 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
808 | 803 | controller='admin/repos', action='repo_settings_vcs', |
|
809 | 804 | conditions={'method': ['GET'], 'function': check_repo}, |
|
810 | 805 | requirements=URL_NAME_REQUIREMENTS) |
|
811 | 806 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
812 | 807 | controller='admin/repos', action='repo_delete_svn_pattern', |
|
813 | 808 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
814 | 809 | requirements=URL_NAME_REQUIREMENTS) |
|
815 | 810 | rmap.connect('repo_pullrequest_settings', '/{repo_name}/settings/pullrequest', |
|
816 | 811 | controller='admin/repos', action='repo_settings_pullrequest', |
|
817 | 812 | conditions={'method': ['GET', 'POST'], 'function': check_repo}, |
|
818 | 813 | requirements=URL_NAME_REQUIREMENTS) |
|
819 | 814 | |
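
Nearly every repository route above adds conditions={'function': check_repo}, which lets a Python callable veto a URL match before it is dispatched. A hedged sketch of that mechanism (check_repo here is a stand-in, not the real implementation; Routes is assumed to call condition functions with the WSGI environ and the tentative match dict):

    from routes import Mapper

    KNOWN_REPOS = {'project-a', 'project-b'}  # stand-in for the real repo lookup

    def check_repo(environ, match_dict):
        # returning a falsy value rejects this route so later ones can be tried
        return match_dict.get('repo_name') in KNOWN_REPOS

    rmap = Mapper()
    rmap.connect('summary_home', '/{repo_name}',
                 controller='summary', action='index',
                 conditions={'function': check_repo})

    print(rmap.match('/project-a', environ={'REQUEST_METHOD': 'GET'}))     # matches
    print(rmap.match('/no-such-repo', environ={'REQUEST_METHOD': 'GET'}))  # None
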
|
820 | 815 | # still-working URL, kept for backward compatibility |
|
821 | 816 | rmap.connect('raw_changeset_home_depraced', |
|
822 | 817 | '/{repo_name}/raw-changeset/{revision}', |
|
823 | 818 | controller='changeset', action='changeset_raw', |
|
824 | 819 | revision='tip', conditions={'function': check_repo}, |
|
825 | 820 | requirements=URL_NAME_REQUIREMENTS) |
|
826 | 821 | |
|
827 | 822 | # new URLs |
|
828 | 823 | rmap.connect('changeset_raw_home', |
|
829 | 824 | '/{repo_name}/changeset-diff/{revision}', |
|
830 | 825 | controller='changeset', action='changeset_raw', |
|
831 | 826 | revision='tip', conditions={'function': check_repo}, |
|
832 | 827 | requirements=URL_NAME_REQUIREMENTS) |
|
833 | 828 | |
|
834 | 829 | rmap.connect('changeset_patch_home', |
|
835 | 830 | '/{repo_name}/changeset-patch/{revision}', |
|
836 | 831 | controller='changeset', action='changeset_patch', |
|
837 | 832 | revision='tip', conditions={'function': check_repo}, |
|
838 | 833 | requirements=URL_NAME_REQUIREMENTS) |
|
839 | 834 | |
|
840 | 835 | rmap.connect('changeset_download_home', |
|
841 | 836 | '/{repo_name}/changeset-download/{revision}', |
|
842 | 837 | controller='changeset', action='changeset_download', |
|
843 | 838 | revision='tip', conditions={'function': check_repo}, |
|
844 | 839 | requirements=URL_NAME_REQUIREMENTS) |
|
845 | 840 | |
|
846 | 841 | rmap.connect('changeset_comment', |
|
847 | 842 | '/{repo_name}/changeset/{revision}/comment', jsroute=True, |
|
848 | 843 | controller='changeset', revision='tip', action='comment', |
|
849 | 844 | conditions={'function': check_repo}, |
|
850 | 845 | requirements=URL_NAME_REQUIREMENTS) |
|
851 | 846 | |
|
852 | 847 | rmap.connect('changeset_comment_preview', |
|
853 | 848 | '/{repo_name}/changeset/comment/preview', jsroute=True, |
|
854 | 849 | controller='changeset', action='preview_comment', |
|
855 | 850 | conditions={'function': check_repo, 'method': ['POST']}, |
|
856 | 851 | requirements=URL_NAME_REQUIREMENTS) |
|
857 | 852 | |
|
858 | 853 | rmap.connect('changeset_comment_delete', |
|
859 | 854 | '/{repo_name}/changeset/comment/{comment_id}/delete', |
|
860 | 855 | controller='changeset', action='delete_comment', |
|
861 | 856 | conditions={'function': check_repo, 'method': ['DELETE']}, |
|
862 | 857 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
863 | 858 | |
|
864 | 859 | rmap.connect('changeset_info', '/{repo_name}/changeset_info/{revision}', |
|
865 | 860 | controller='changeset', action='changeset_info', |
|
866 | 861 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
867 | 862 | |
|
868 | 863 | rmap.connect('compare_home', |
|
869 | 864 | '/{repo_name}/compare', |
|
870 | 865 | controller='compare', action='index', |
|
871 | 866 | conditions={'function': check_repo}, |
|
872 | 867 | requirements=URL_NAME_REQUIREMENTS) |
|
873 | 868 | |
|
874 | 869 | rmap.connect('compare_url', |
|
875 | 870 | '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', |
|
876 | 871 | controller='compare', action='compare', |
|
877 | 872 | conditions={'function': check_repo}, |
|
878 | 873 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
879 | 874 | |
|
880 | 875 | rmap.connect('pullrequest_home', |
|
881 | 876 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
882 | 877 | action='index', conditions={'function': check_repo, |
|
883 | 878 | 'method': ['GET']}, |
|
884 | 879 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
885 | 880 | |
|
886 | 881 | rmap.connect('pullrequest', |
|
887 | 882 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
888 | 883 | action='create', conditions={'function': check_repo, |
|
889 | 884 | 'method': ['POST']}, |
|
890 | 885 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
891 | 886 | |
|
892 | 887 | rmap.connect('pullrequest_repo_refs', |
|
893 | 888 | '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
894 | 889 | controller='pullrequests', |
|
895 | 890 | action='get_repo_refs', |
|
896 | 891 | conditions={'function': check_repo, 'method': ['GET']}, |
|
897 | 892 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
898 | 893 | |
|
899 | 894 | rmap.connect('pullrequest_repo_destinations', |
|
900 | 895 | '/{repo_name}/pull-request/repo-destinations', |
|
901 | 896 | controller='pullrequests', |
|
902 | 897 | action='get_repo_destinations', |
|
903 | 898 | conditions={'function': check_repo, 'method': ['GET']}, |
|
904 | 899 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
905 | 900 | |
|
906 | 901 | rmap.connect('pullrequest_show', |
|
907 | 902 | '/{repo_name}/pull-request/{pull_request_id}', |
|
908 | 903 | controller='pullrequests', |
|
909 | 904 | action='show', conditions={'function': check_repo, |
|
910 | 905 | 'method': ['GET']}, |
|
911 | 906 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
912 | 907 | |
|
913 | 908 | rmap.connect('pullrequest_update', |
|
914 | 909 | '/{repo_name}/pull-request/{pull_request_id}', |
|
915 | 910 | controller='pullrequests', |
|
916 | 911 | action='update', conditions={'function': check_repo, |
|
917 | 912 | 'method': ['PUT']}, |
|
918 | 913 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
919 | 914 | |
|
920 | 915 | rmap.connect('pullrequest_merge', |
|
921 | 916 | '/{repo_name}/pull-request/{pull_request_id}', |
|
922 | 917 | controller='pullrequests', |
|
923 | 918 | action='merge', conditions={'function': check_repo, |
|
924 | 919 | 'method': ['POST']}, |
|
925 | 920 | requirements=URL_NAME_REQUIREMENTS) |
|
926 | 921 | |
|
927 | 922 | rmap.connect('pullrequest_delete', |
|
928 | 923 | '/{repo_name}/pull-request/{pull_request_id}', |
|
929 | 924 | controller='pullrequests', |
|
930 | 925 | action='delete', conditions={'function': check_repo, |
|
931 | 926 | 'method': ['DELETE']}, |
|
932 | 927 | requirements=URL_NAME_REQUIREMENTS) |
|
933 | 928 | |
|
934 | 929 | rmap.connect('pullrequest_show_all', |
|
935 | 930 | '/{repo_name}/pull-request', |
|
936 | 931 | controller='pullrequests', |
|
937 | 932 | action='show_all', conditions={'function': check_repo, |
|
938 | 933 | 'method': ['GET']}, |
|
939 | 934 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
940 | 935 | |
|
941 | 936 | rmap.connect('pullrequest_comment', |
|
942 | 937 | '/{repo_name}/pull-request-comment/{pull_request_id}', |
|
943 | 938 | controller='pullrequests', |
|
944 | 939 | action='comment', conditions={'function': check_repo, |
|
945 | 940 | 'method': ['POST']}, |
|
946 | 941 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
947 | 942 | |
|
948 | 943 | rmap.connect('pullrequest_comment_delete', |
|
949 | 944 | '/{repo_name}/pull-request-comment/{comment_id}/delete', |
|
950 | 945 | controller='pullrequests', action='delete_comment', |
|
951 | 946 | conditions={'function': check_repo, 'method': ['DELETE']}, |
|
952 | 947 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
953 | 948 | |
|
954 | 949 | rmap.connect('summary_home_explicit', '/{repo_name}/summary', |
|
955 | 950 | controller='summary', conditions={'function': check_repo}, |
|
956 | 951 | requirements=URL_NAME_REQUIREMENTS) |
|
957 | 952 | |
|
958 | 953 | rmap.connect('branches_home', '/{repo_name}/branches', |
|
959 | 954 | controller='branches', conditions={'function': check_repo}, |
|
960 | 955 | requirements=URL_NAME_REQUIREMENTS) |
|
961 | 956 | |
|
962 | 957 | rmap.connect('tags_home', '/{repo_name}/tags', |
|
963 | 958 | controller='tags', conditions={'function': check_repo}, |
|
964 | 959 | requirements=URL_NAME_REQUIREMENTS) |
|
965 | 960 | |
|
966 | 961 | rmap.connect('bookmarks_home', '/{repo_name}/bookmarks', |
|
967 | 962 | controller='bookmarks', conditions={'function': check_repo}, |
|
968 | 963 | requirements=URL_NAME_REQUIREMENTS) |
|
969 | 964 | |
|
970 | 965 | rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True, |
|
971 | 966 | controller='changelog', conditions={'function': check_repo}, |
|
972 | 967 | requirements=URL_NAME_REQUIREMENTS) |
|
973 | 968 | |
|
974 | 969 | rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary', |
|
975 | 970 | controller='changelog', action='changelog_summary', |
|
976 | 971 | conditions={'function': check_repo}, |
|
977 | 972 | requirements=URL_NAME_REQUIREMENTS) |
|
978 | 973 | |
|
979 | 974 | rmap.connect('changelog_file_home', |
|
980 | 975 | '/{repo_name}/changelog/{revision}/{f_path}', |
|
981 | 976 | controller='changelog', f_path=None, |
|
982 | 977 | conditions={'function': check_repo}, |
|
983 | 978 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
984 | 979 | |
|
985 | 980 | rmap.connect('changelog_elements', '/{repo_name}/changelog_details', |
|
986 | 981 | controller='changelog', action='changelog_elements', |
|
987 | 982 | conditions={'function': check_repo}, |
|
988 | 983 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
989 | 984 | |
|
990 | 985 | rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}', |
|
991 | 986 | controller='files', revision='tip', f_path='', |
|
992 | 987 | conditions={'function': check_repo}, |
|
993 | 988 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
994 | 989 | |
|
995 | 990 | rmap.connect('files_home_simple_catchrev', |
|
996 | 991 | '/{repo_name}/files/{revision}', |
|
997 | 992 | controller='files', revision='tip', f_path='', |
|
998 | 993 | conditions={'function': check_repo}, |
|
999 | 994 | requirements=URL_NAME_REQUIREMENTS) |
|
1000 | 995 | |
|
1001 | 996 | rmap.connect('files_home_simple_catchall', |
|
1002 | 997 | '/{repo_name}/files', |
|
1003 | 998 | controller='files', revision='tip', f_path='', |
|
1004 | 999 | conditions={'function': check_repo}, |
|
1005 | 1000 | requirements=URL_NAME_REQUIREMENTS) |
|
1006 | 1001 | |
|
1007 | 1002 | rmap.connect('files_history_home', |
|
1008 | 1003 | '/{repo_name}/history/{revision}/{f_path}', |
|
1009 | 1004 | controller='files', action='history', revision='tip', f_path='', |
|
1010 | 1005 | conditions={'function': check_repo}, |
|
1011 | 1006 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1012 | 1007 | |
|
1013 | 1008 | rmap.connect('files_authors_home', |
|
1014 | 1009 | '/{repo_name}/authors/{revision}/{f_path}', |
|
1015 | 1010 | controller='files', action='authors', revision='tip', f_path='', |
|
1016 | 1011 | conditions={'function': check_repo}, |
|
1017 | 1012 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1018 | 1013 | |
|
1019 | 1014 | rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}', |
|
1020 | 1015 | controller='files', action='diff', f_path='', |
|
1021 | 1016 | conditions={'function': check_repo}, |
|
1022 | 1017 | requirements=URL_NAME_REQUIREMENTS) |
|
1023 | 1018 | |
|
1024 | 1019 | rmap.connect('files_diff_2way_home', |
|
1025 | 1020 | '/{repo_name}/diff-2way/{f_path}', |
|
1026 | 1021 | controller='files', action='diff_2way', f_path='', |
|
1027 | 1022 | conditions={'function': check_repo}, |
|
1028 | 1023 | requirements=URL_NAME_REQUIREMENTS) |
|
1029 | 1024 | |
|
1030 | 1025 | rmap.connect('files_rawfile_home', |
|
1031 | 1026 | '/{repo_name}/rawfile/{revision}/{f_path}', |
|
1032 | 1027 | controller='files', action='rawfile', revision='tip', |
|
1033 | 1028 | f_path='', conditions={'function': check_repo}, |
|
1034 | 1029 | requirements=URL_NAME_REQUIREMENTS) |
|
1035 | 1030 | |
|
1036 | 1031 | rmap.connect('files_raw_home', |
|
1037 | 1032 | '/{repo_name}/raw/{revision}/{f_path}', |
|
1038 | 1033 | controller='files', action='raw', revision='tip', f_path='', |
|
1039 | 1034 | conditions={'function': check_repo}, |
|
1040 | 1035 | requirements=URL_NAME_REQUIREMENTS) |
|
1041 | 1036 | |
|
1042 | 1037 | rmap.connect('files_render_home', |
|
1043 | 1038 | '/{repo_name}/render/{revision}/{f_path}', |
|
1044 | 1039 | controller='files', action='index', revision='tip', f_path='', |
|
1045 | 1040 | rendered=True, conditions={'function': check_repo}, |
|
1046 | 1041 | requirements=URL_NAME_REQUIREMENTS) |
|
1047 | 1042 | |
|
1048 | 1043 | rmap.connect('files_annotate_home', |
|
1049 | 1044 | '/{repo_name}/annotate/{revision}/{f_path}', |
|
1050 | 1045 | controller='files', action='index', revision='tip', |
|
1051 | 1046 | f_path='', annotate=True, conditions={'function': check_repo}, |
|
1052 | 1047 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1053 | 1048 | |
|
1054 | 1049 | rmap.connect('files_annotate_previous', |
|
1055 | 1050 | '/{repo_name}/annotate-previous/{revision}/{f_path}', |
|
1056 | 1051 | controller='files', action='annotate_previous', revision='tip', |
|
1057 | 1052 | f_path='', annotate=True, conditions={'function': check_repo}, |
|
1058 | 1053 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1059 | 1054 | |
|
1060 | 1055 | rmap.connect('files_edit', |
|
1061 | 1056 | '/{repo_name}/edit/{revision}/{f_path}', |
|
1062 | 1057 | controller='files', action='edit', revision='tip', |
|
1063 | 1058 | f_path='', |
|
1064 | 1059 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1065 | 1060 | requirements=URL_NAME_REQUIREMENTS) |
|
1066 | 1061 | |
|
1067 | 1062 | rmap.connect('files_edit_home', |
|
1068 | 1063 | '/{repo_name}/edit/{revision}/{f_path}', |
|
1069 | 1064 | controller='files', action='edit_home', revision='tip', |
|
1070 | 1065 | f_path='', conditions={'function': check_repo}, |
|
1071 | 1066 | requirements=URL_NAME_REQUIREMENTS) |
|
1072 | 1067 | |
|
1073 | 1068 | rmap.connect('files_add', |
|
1074 | 1069 | '/{repo_name}/add/{revision}/{f_path}', |
|
1075 | 1070 | controller='files', action='add', revision='tip', |
|
1076 | 1071 | f_path='', |
|
1077 | 1072 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1078 | 1073 | requirements=URL_NAME_REQUIREMENTS) |
|
1079 | 1074 | |
|
1080 | 1075 | rmap.connect('files_add_home', |
|
1081 | 1076 | '/{repo_name}/add/{revision}/{f_path}', |
|
1082 | 1077 | controller='files', action='add_home', revision='tip', |
|
1083 | 1078 | f_path='', conditions={'function': check_repo}, |
|
1084 | 1079 | requirements=URL_NAME_REQUIREMENTS) |
|
1085 | 1080 | |
|
1086 | 1081 | rmap.connect('files_delete', |
|
1087 | 1082 | '/{repo_name}/delete/{revision}/{f_path}', |
|
1088 | 1083 | controller='files', action='delete', revision='tip', |
|
1089 | 1084 | f_path='', |
|
1090 | 1085 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1091 | 1086 | requirements=URL_NAME_REQUIREMENTS) |
|
1092 | 1087 | |
|
1093 | 1088 | rmap.connect('files_delete_home', |
|
1094 | 1089 | '/{repo_name}/delete/{revision}/{f_path}', |
|
1095 | 1090 | controller='files', action='delete_home', revision='tip', |
|
1096 | 1091 | f_path='', conditions={'function': check_repo}, |
|
1097 | 1092 | requirements=URL_NAME_REQUIREMENTS) |
|
1098 | 1093 | |
|
1099 | 1094 | rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}', |
|
1100 | 1095 | controller='files', action='archivefile', |
|
1101 | 1096 | conditions={'function': check_repo}, |
|
1102 | 1097 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1103 | 1098 | |
|
1104 | 1099 | rmap.connect('files_nodelist_home', |
|
1105 | 1100 | '/{repo_name}/nodelist/{revision}/{f_path}', |
|
1106 | 1101 | controller='files', action='nodelist', |
|
1107 | 1102 | conditions={'function': check_repo}, |
|
1108 | 1103 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1109 | 1104 | |
|
1110 | 1105 | rmap.connect('files_nodetree_full', |
|
1111 | 1106 | '/{repo_name}/nodetree_full/{commit_id}/{f_path}', |
|
1112 | 1107 | controller='files', action='nodetree_full', |
|
1113 | 1108 | conditions={'function': check_repo}, |
|
1114 | 1109 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1115 | 1110 | |
|
1116 | 1111 | rmap.connect('repo_fork_create_home', '/{repo_name}/fork', |
|
1117 | 1112 | controller='forks', action='fork_create', |
|
1118 | 1113 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1119 | 1114 | requirements=URL_NAME_REQUIREMENTS) |
|
1120 | 1115 | |
|
1121 | 1116 | rmap.connect('repo_fork_home', '/{repo_name}/fork', |
|
1122 | 1117 | controller='forks', action='fork', |
|
1123 | 1118 | conditions={'function': check_repo}, |
|
1124 | 1119 | requirements=URL_NAME_REQUIREMENTS) |
|
1125 | 1120 | |
|
1126 | 1121 | rmap.connect('repo_forks_home', '/{repo_name}/forks', |
|
1127 | 1122 | controller='forks', action='forks', |
|
1128 | 1123 | conditions={'function': check_repo}, |
|
1129 | 1124 | requirements=URL_NAME_REQUIREMENTS) |
|
1130 | 1125 | |
|
1131 | 1126 | # must be registered here so repo group and repo URLs are caught in the right order |
|
1132 | 1127 | _connect_with_slash( |
|
1133 | 1128 | rmap, 'repo_group_home', '/{group_name}', |
|
1134 | 1129 | controller='home', action='index_repo_group', |
|
1135 | 1130 | conditions={'function': check_group}, |
|
1136 | 1131 | requirements=URL_NAME_REQUIREMENTS) |
|
1137 | 1132 | |
|
1138 | 1133 | # catch-all route, registered last |
|
1139 | 1134 | _connect_with_slash( |
|
1140 | 1135 | rmap, 'summary_home', '/{repo_name}', jsroute=True, |
|
1141 | 1136 | controller='summary', action='index', |
|
1142 | 1137 | conditions={'function': check_repo}, |
|
1143 | 1138 | requirements=URL_NAME_REQUIREMENTS) |
|
1144 | 1139 | |
|
1145 | 1140 | return rmap |
|
1146 | 1141 | |
|
1147 | 1142 | |
|
1148 | 1143 | def _connect_with_slash(mapper, name, path, *args, **kwargs): |
|
1149 | 1144 | """ |
|
1150 | 1145 | Connect a route with an optional trailing slash in `path`. |
|
1151 | 1146 | """ |
|
1152 | 1147 | mapper.connect(name + '_slash', path + '/', *args, **kwargs) |
|
1153 | 1148 | mapper.connect(name, path, *args, **kwargs) |
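
_connect_with_slash above registers every catch-all route twice so that both the bare path and the trailing-slash variant resolve to the same controller. A standalone illustration with the plain routes Mapper (the repository name is made up):

    from routes import Mapper

    def _connect_with_slash(mapper, name, path, *args, **kwargs):
        """Connect `path` both with and without a trailing slash."""
        mapper.connect(name + '_slash', path + '/', *args, **kwargs)
        mapper.connect(name, path, *args, **kwargs)

    rmap = Mapper()
    _connect_with_slash(rmap, 'summary_home', '/{repo_name}',
                        controller='summary', action='index')

    print(rmap.match('/some-repo'))   # matched by 'summary_home'
    print(rmap.match('/some-repo/'))  # matched by 'summary_home_slash'
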
@@ -1,290 +1,257 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Home controller for RhodeCode Enterprise |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import time |
|
27 | 27 | import re |
|
28 | 28 | |
|
29 | 29 | from pylons import tmpl_context as c, request, url, config |
|
30 | 30 | from pylons.i18n.translation import _ |
|
31 | 31 | from sqlalchemy.sql import func |
|
32 | 32 | |
|
33 | 33 | from rhodecode.lib.auth import ( |
|
34 | 34 | LoginRequired, HasPermissionAllDecorator, AuthUser, |
|
35 | 35 | HasRepoGroupPermissionAnyDecorator, XHRRequired) |
|
36 | 36 | from rhodecode.lib.base import BaseController, render |
|
37 | 37 | from rhodecode.lib.index import searcher_from_config |
|
38 | 38 | from rhodecode.lib.ext_json import json |
|
39 | 39 | from rhodecode.lib.utils import jsonify |
|
40 | 40 | from rhodecode.lib.utils2 import safe_unicode, str2bool |
|
41 | 41 | from rhodecode.model.db import Repository, RepoGroup |
|
42 | 42 | from rhodecode.model.repo import RepoModel |
|
43 | 43 | from rhodecode.model.repo_group import RepoGroupModel |
|
44 | 44 | from rhodecode.model.scm import RepoList, RepoGroupList |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | log = logging.getLogger(__name__) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | class HomeController(BaseController): |
|
51 | 51 | def __before__(self): |
|
52 | 52 | super(HomeController, self).__before__() |
|
53 | 53 | |
|
54 | 54 | def ping(self): |
|
55 | 55 | """ |
|
56 | 56 | Ping endpoint; does not require login. Useful for checking that the platform is up. |
|
57 | 57 | """ |
|
58 | 58 | instance_id = getattr(c, 'rhodecode_instanceid', '') |
|
59 | 59 | return 'pong[%s] => %s' % (instance_id, self.ip_addr,) |
|
60 | 60 | |
|
61 | 61 | @LoginRequired() |
|
62 | 62 | @HasPermissionAllDecorator('hg.admin') |
|
63 | 63 | def error_test(self): |
|
64 | 64 | """ |
|
65 | 65 | Test exception handling and emails on errors |
|
66 | 66 | """ |
|
67 | 67 | class TestException(Exception): |
|
68 | 68 | pass |
|
69 | 69 | |
|
70 | 70 | msg = ('RhodeCode Enterprise %s test exception. Generation time: %s' |
|
71 | 71 | % (c.rhodecode_name, time.time())) |
|
72 | 72 | raise TestException(msg) |
|
73 | 73 | |
|
74 | 74 | def _get_groups_and_repos(self, repo_group_id=None): |
|
75 | 75 | # repo groups |
|
76 | 76 | repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) |
|
77 | 77 | _perms = ['group.read', 'group.write', 'group.admin'] |
|
78 | 78 | repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) |
|
79 | 79 | repo_group_data = RepoGroupModel().get_repo_groups_as_dict( |
|
80 | 80 | repo_group_list=repo_group_list_acl, admin=False) |
|
81 | 81 | |
|
82 | 82 | # repositories |
|
83 | 83 | repo_list = Repository.get_all_repos(group_id=repo_group_id) |
|
84 | 84 | _perms = ['repository.read', 'repository.write', 'repository.admin'] |
|
85 | 85 | repo_list_acl = RepoList(repo_list, perm_set=_perms) |
|
86 | 86 | repo_data = RepoModel().get_repos_as_dict( |
|
87 | 87 | repo_list=repo_list_acl, admin=False) |
|
88 | 88 | |
|
89 | 89 | return repo_data, repo_group_data |
|
90 | 90 | |
|
91 | 91 | @LoginRequired() |
|
92 | 92 | def index(self): |
|
93 | 93 | c.repo_group = None |
|
94 | 94 | |
|
95 | 95 | repo_data, repo_group_data = self._get_groups_and_repos() |
|
96 | 96 | # json used to render the grids |
|
97 | 97 | c.repos_data = json.dumps(repo_data) |
|
98 | 98 | c.repo_groups_data = json.dumps(repo_group_data) |
|
99 | 99 | |
|
100 | 100 | return render('/index.mako') |
|
101 | 101 | |
|
102 | 102 | @LoginRequired() |
|
103 | 103 | @HasRepoGroupPermissionAnyDecorator('group.read', 'group.write', |
|
104 | 104 | 'group.admin') |
|
105 | 105 | def index_repo_group(self, group_name): |
|
106 | 106 | """GET /repo_group_name: Show a specific item""" |
|
107 | 107 | c.repo_group = RepoGroupModel()._get_repo_group(group_name) |
|
108 | 108 | repo_data, repo_group_data = self._get_groups_and_repos( |
|
109 | 109 | c.repo_group.group_id) |
|
110 | 110 | |
|
111 | 111 | # json used to render the grids |
|
112 | 112 | c.repos_data = json.dumps(repo_data) |
|
113 | 113 | c.repo_groups_data = json.dumps(repo_group_data) |
|
114 | 114 | |
|
115 | 115 | return render('index_repo_group.mako') |
|
116 | 116 | |
|
117 | 117 | def _get_repo_list(self, name_contains=None, repo_type=None, limit=20): |
|
118 | 118 | query = Repository.query()\ |
|
119 | 119 | .order_by(func.length(Repository.repo_name))\ |
|
120 | 120 | .order_by(Repository.repo_name) |
|
121 | 121 | |
|
122 | 122 | if repo_type: |
|
123 | 123 | query = query.filter(Repository.repo_type == repo_type) |
|
124 | 124 | |
|
125 | 125 | if name_contains: |
|
126 | 126 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
127 | 127 | query = query.filter( |
|
128 | 128 | Repository.repo_name.ilike(ilike_expression)) |
|
129 | 129 | query = query.limit(limit) |
|
130 | 130 | |
|
131 | 131 | all_repos = query.all() |
|
132 | 132 | repo_iter = self.scm_model.get_repos(all_repos) |
|
133 | 133 | return [ |
|
134 | 134 | { |
|
135 | 135 | 'id': obj['name'], |
|
136 | 136 | 'text': obj['name'], |
|
137 | 137 | 'type': 'repo', |
|
138 | 138 | 'obj': obj['dbrepo'], |
|
139 | 139 | 'url': url('summary_home', repo_name=obj['name']) |
|
140 | 140 | } |
|
141 | 141 | for obj in repo_iter] |
|
142 | 142 | |
|
143 | 143 | def _get_repo_group_list(self, name_contains=None, limit=20): |
|
144 | 144 | query = RepoGroup.query()\ |
|
145 | 145 | .order_by(func.length(RepoGroup.group_name))\ |
|
146 | 146 | .order_by(RepoGroup.group_name) |
|
147 | 147 | |
|
148 | 148 | if name_contains: |
|
149 | 149 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
150 | 150 | query = query.filter( |
|
151 | 151 | RepoGroup.group_name.ilike(ilike_expression)) |
|
152 | 152 | query = query.limit(limit) |
|
153 | 153 | |
|
154 | 154 | all_groups = query.all() |
|
155 | 155 | repo_groups_iter = self.scm_model.get_repo_groups(all_groups) |
|
156 | 156 | return [ |
|
157 | 157 | { |
|
158 | 158 | 'id': obj.group_name, |
|
159 | 159 | 'text': obj.group_name, |
|
160 | 160 | 'type': 'group', |
|
161 | 161 | 'obj': {}, |
|
162 | 162 | 'url': url('repo_group_home', group_name=obj.group_name) |
|
163 | 163 | } |
|
164 | 164 | for obj in repo_groups_iter] |
|
165 | 165 | |
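
_get_repo_list and _get_repo_group_list above share one query shape: order by name length first so the shortest (most exact) names surface at the top, then alphabetically, with an optional case-insensitive substring filter and a hard limit. A standalone sketch of that pattern (the session and model are generic SQLAlchemy placeholders, not RhodeCode objects):

    from sqlalchemy import func

    def shortest_matches(session, model, name_column, name_contains=None, limit=20):
        # shortest names first, then alphabetical, optionally filtered by a
        # case-insensitive substring -- the same shape as the helpers above
        query = (session.query(model)
                 .order_by(func.length(name_column))
                 .order_by(name_column))
        if name_contains:
            query = query.filter(name_column.ilike(u'%{}%'.format(name_contains)))
        return query.limit(limit).all()
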
|
166 | 166 | def _get_hash_commit_list(self, hash_starts_with=None, limit=20): |
|
167 | 167 | if not hash_starts_with or len(hash_starts_with) < 3: |
|
168 | 168 | return [] |
|
169 | 169 | |
|
170 | 170 | commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with) |
|
171 | 171 | |
|
172 | 172 | if len(commit_hashes) != 1: |
|
173 | 173 | return [] |
|
174 | 174 | |
|
175 | 175 | commit_hash_prefix = commit_hashes[0] |
|
176 | 176 | |
|
177 | 177 | auth_user = AuthUser( |
|
178 | 178 | user_id=c.rhodecode_user.user_id, ip_addr=self.ip_addr) |
|
179 | 179 | searcher = searcher_from_config(config) |
|
180 | 180 | result = searcher.search( |
|
181 | 181 | 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user, |
|
182 | 182 | raise_on_exc=False) |
|
183 | 183 | |
|
184 | 184 | return [ |
|
185 | 185 | { |
|
186 | 186 | 'id': entry['commit_id'], |
|
187 | 187 | 'text': entry['commit_id'], |
|
188 | 188 | 'type': 'commit', |
|
189 | 189 | 'obj': {'repo': entry['repository']}, |
|
190 | 190 | 'url': url('changeset_home', |
|
191 | 191 | repo_name=entry['repository'], |
|
192 | 192 | revision=entry['commit_id']) |
|
193 | 193 | } |
|
194 | 194 | for entry in result['results']] |
|
195 | 195 | |
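
_get_hash_commit_list above only hits the full-text searcher when the input is at least three characters long and contains exactly one hex-looking token. The guard in isolation (a sketch; the helper name is made up):

    import re

    def extract_commit_prefix(text):
        # mirrors the guard above: >= 3 characters and exactly one hex candidate
        if not text or len(text) < 3:
            return None
        candidates = re.findall('([0-9a-f]{2,40})', text)
        if len(candidates) != 1:
            return None
        return candidates[0]

    print(extract_commit_prefix('deadbeef'))  # 'deadbeef'
    print(extract_commit_prefix('ab'))        # None -- too short
    print(extract_commit_prefix('abc def'))   # None -- more than one candidate
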
|
196 | 196 | @LoginRequired() |
|
197 | 197 | @XHRRequired() |
|
198 | 198 | @jsonify |
|
199 | 199 | def goto_switcher_data(self): |
|
200 | 200 | query = request.GET.get('query') |
|
201 | 201 | log.debug('generating goto switcher list, query %s', query) |
|
202 | 202 | |
|
203 | 203 | res = [] |
|
204 | 204 | repo_groups = self._get_repo_group_list(query) |
|
205 | 205 | if repo_groups: |
|
206 | 206 | res.append({ |
|
207 | 207 | 'text': _('Groups'), |
|
208 | 208 | 'children': repo_groups |
|
209 | 209 | }) |
|
210 | 210 | |
|
211 | 211 | repos = self._get_repo_list(query) |
|
212 | 212 | if repos: |
|
213 | 213 | res.append({ |
|
214 | 214 | 'text': _('Repositories'), |
|
215 | 215 | 'children': repos |
|
216 | 216 | }) |
|
217 | 217 | |
|
218 | 218 | commits = self._get_hash_commit_list(query) |
|
219 | 219 | if commits: |
|
220 | 220 | unique_repos = {} |
|
221 | 221 | for commit in commits: |
|
222 | 222 | unique_repos.setdefault(commit['obj']['repo'], [] |
|
223 | 223 | ).append(commit) |
|
224 | 224 | |
|
225 | 225 | for repo in unique_repos: |
|
226 | 226 | res.append({ |
|
227 | 227 | 'text': _('Commits in %(repo)s') % {'repo': repo}, |
|
228 | 228 | 'children': unique_repos[repo] |
|
229 | 229 | }) |
|
230 | 230 | |
|
231 | 231 | data = { |
|
232 | 232 | 'more': False, |
|
233 | 233 | 'results': res |
|
234 | 234 | } |
|
235 | 235 | return data |
|
236 | 236 | |
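For orientation, the payload assembled above is a plain ``{'more': False, 'results': [...]}`` dict whose sections are labelled 'Groups', 'Repositories' and 'Commits in <repo>'. A schematic consumer, assuming ``response`` holds the JSON body returned by this view::

    import json

    data = json.loads(response.body)
    assert data['more'] is False
    for section in data['results']:
        # each child carries id, text, type ('group'/'repo'/'commit') and url
        for item in section['children']:
            print '%s: %s %s' % (section['text'], item['type'], item['url'])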
|
237 | 237 | @LoginRequired() |
|
238 | 238 | @XHRRequired() |
|
239 | 239 | @jsonify |
|
240 | 240 | def repo_list_data(self): |
|
241 | 241 | query = request.GET.get('query') |
|
242 | 242 | repo_type = request.GET.get('repo_type') |
|
243 | 243 | log.debug('generating repo list, query:%s', query) |
|
244 | 244 | |
|
245 | 245 | res = [] |
|
246 | 246 | repos = self._get_repo_list(query, repo_type=repo_type) |
|
247 | 247 | if repos: |
|
248 | 248 | res.append({ |
|
249 | 249 | 'text': _('Repositories'), |
|
250 | 250 | 'children': repos |
|
251 | 251 | }) |
|
252 | 252 | |
|
253 | 253 | data = { |
|
254 | 254 | 'more': False, |
|
255 | 255 | 'results': res |
|
256 | 256 | } |
|
257 | 257 | return data |
|
258 | ||
|
259 | @LoginRequired() | |
|
260 | @XHRRequired() | |
|
261 | @jsonify | |
|
262 | def user_autocomplete_data(self): | |
|
263 | query = request.GET.get('query') | |
|
264 | active = str2bool(request.GET.get('active') or True) | |
|
265 | ||
|
266 | repo_model = RepoModel() | |
|
267 | _users = repo_model.get_users( | |
|
268 | name_contains=query, only_active=active) | |
|
269 | ||
|
270 | if request.GET.get('user_groups'): | |
|
271 | # extend with user groups | |
|
272 | _user_groups = repo_model.get_user_groups( | |
|
273 | name_contains=query, only_active=active) | |
|
274 | _users = _users + _user_groups | |
|
275 | ||
|
276 | return {'suggestions': _users} | |
|
277 | ||
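The `active` and `user_groups` switches above are read straight from the query string, so a caller toggles them with string values; a sketch in the style of the tests further down (the query value is made up)::

    # from a TestController-based test: include inactive users and user groups
    response = self.app.get(
        url(controller='home', action='user_autocomplete_data',
            query='adm', active='0', user_groups='true'),
        headers={'X-REQUESTED-WITH': 'XMLHttpRequest'}, status=200)
    suggestions = json.loads(response.body)['suggestions']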
|
278 | @LoginRequired() | |
|
279 | @XHRRequired() | |
|
280 | @jsonify | |
|
281 | def user_group_autocomplete_data(self): | |
|
282 | query = request.GET.get('query') | |
|
283 | active = str2bool(request.GET.get('active') or True) | |
|
284 | ||
|
285 | repo_model = RepoModel() | |
|
286 | _user_groups = repo_model.get_user_groups( | |
|
287 | name_contains=query, only_active=active) | |
|
288 | _user_groups = _user_groups | |
|
289 | ||
|
290 | return {'suggestions': _user_groups} |
@@ -1,337 +1,341 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Helpers for fixture generation |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os |
|
26 | 26 | import time |
|
27 | 27 | import tempfile |
|
28 | 28 | import shutil |
|
29 | 29 | |
|
30 | 30 | import configobj |
|
31 | 31 | |
|
32 | 32 | from rhodecode.tests import * |
|
33 | 33 | from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist |
|
34 | 34 | from rhodecode.model.meta import Session |
|
35 | 35 | from rhodecode.model.repo import RepoModel |
|
36 | 36 | from rhodecode.model.user import UserModel |
|
37 | 37 | from rhodecode.model.repo_group import RepoGroupModel |
|
38 | 38 | from rhodecode.model.user_group import UserGroupModel |
|
39 | 39 | from rhodecode.model.gist import GistModel |
|
40 | 40 | from rhodecode.model.auth_token import AuthTokenModel |
|
41 | 41 | |
|
42 | 42 | dn = os.path.dirname |
|
43 | 43 | FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures') |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | def error_function(*args, **kwargs): |
|
47 | 47 | raise Exception('Total Crash !') |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | class TestINI(object): |
|
51 | 51 | """ |
|
52 | 52 | Allows to create a new test.ini file as a copy of existing one with edited |
|
53 | 53 | data. Example usage:: |
|
54 | 54 | |
|
55 | 55 | with TestINI('test.ini', [{'section':{'key':val'}]) as new_test_ini_path: |
|
56 | 56 | print 'paster server %s' % new_test_ini |
|
57 | 57 | """ |
|
58 | 58 | |
|
59 | 59 | def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT', |
|
60 | 60 | destroy=True, dir=None): |
|
61 | 61 | self.ini_file_path = ini_file_path |
|
62 | 62 | self.ini_params = ini_params |
|
63 | 63 | self.new_path = None |
|
64 | 64 | self.new_path_prefix = new_file_prefix |
|
65 | 65 | self._destroy = destroy |
|
66 | 66 | self._dir = dir |
|
67 | 67 | |
|
68 | 68 | def __enter__(self): |
|
69 | 69 | return self.create() |
|
70 | 70 | |
|
71 | 71 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
72 | 72 | self.destroy() |
|
73 | 73 | |
|
74 | 74 | def create(self): |
|
75 | 75 | config = configobj.ConfigObj( |
|
76 | 76 | self.ini_file_path, file_error=True, write_empty_values=True) |
|
77 | 77 | |
|
78 | 78 | for data in self.ini_params: |
|
79 | 79 | section, ini_params = data.items()[0] |
|
80 | 80 | for key, val in ini_params.items(): |
|
81 | 81 | config[section][key] = val |
|
82 | 82 | with tempfile.NamedTemporaryFile( |
|
83 | 83 | prefix=self.new_path_prefix, suffix='.ini', dir=self._dir, |
|
84 | 84 | delete=False) as new_ini_file: |
|
85 | 85 | config.write(new_ini_file) |
|
86 | 86 | self.new_path = new_ini_file.name |
|
87 | 87 | |
|
88 | 88 | return self.new_path |
|
89 | 89 | |
|
90 | 90 | def destroy(self): |
|
91 | 91 | if self._destroy: |
|
92 | 92 | os.remove(self.new_path) |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | class Fixture(object): |
|
96 | 96 | |
|
97 | 97 | def anon_access(self, status): |
|
98 | 98 | """ |
|
99 | 99 | Context process for disabling anonymous access. use like: |
|
100 | 100 | fixture = Fixture() |
|
101 | 101 | with fixture.anon_access(False): |
|
102 | 102 | #tests |
|
103 | 103 | |
|
104 | 104 | after this block anon access will be set to `not status` |
|
105 | 105 | """ |
|
106 | 106 | |
|
107 | 107 | class context(object): |
|
108 | 108 | def __enter__(self): |
|
109 | 109 | anon = User.get_default_user() |
|
110 | 110 | anon.active = status |
|
111 | 111 | Session().add(anon) |
|
112 | 112 | Session().commit() |
|
113 | 113 | time.sleep(1.5) # must sleep for cache (1s to expire) |
|
114 | 114 | |
|
115 | 115 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
116 | 116 | anon = User.get_default_user() |
|
117 | 117 | anon.active = not status |
|
118 | 118 | Session().add(anon) |
|
119 | 119 | Session().commit() |
|
120 | 120 | |
|
121 | 121 | return context() |
|
122 | 122 | |
|
123 | 123 | def _get_repo_create_params(self, **custom): |
|
124 | 124 | defs = { |
|
125 | 125 | 'repo_name': None, |
|
126 | 126 | 'repo_type': 'hg', |
|
127 | 127 | 'clone_uri': '', |
|
128 | 128 | 'repo_group': '-1', |
|
129 | 129 | 'repo_description': 'DESC', |
|
130 | 130 | 'repo_private': False, |
|
131 | 131 | 'repo_landing_rev': 'rev:tip', |
|
132 | 132 | 'repo_copy_permissions': False, |
|
133 | 133 | 'repo_state': Repository.STATE_CREATED, |
|
134 | 134 | } |
|
135 | 135 | defs.update(custom) |
|
136 | 136 | if 'repo_name_full' not in custom: |
|
137 | 137 | defs.update({'repo_name_full': defs['repo_name']}) |
|
138 | 138 | |
|
139 | 139 | # fix the repo name if passed as repo_name_full |
|
140 | 140 | if defs['repo_name']: |
|
141 | 141 | defs['repo_name'] = defs['repo_name'].split('/')[-1] |
|
142 | 142 | |
|
143 | 143 | return defs |
|
144 | 144 | |
|
145 | 145 | def _get_group_create_params(self, **custom): |
|
146 | 146 | defs = { |
|
147 | 147 | 'group_name': None, |
|
148 | 148 | 'group_description': 'DESC', |
|
149 | 149 | 'perm_updates': [], |
|
150 | 150 | 'perm_additions': [], |
|
151 | 151 | 'perm_deletions': [], |
|
152 | 152 | 'group_parent_id': -1, |
|
153 | 153 | 'enable_locking': False, |
|
154 | 154 | 'recursive': False, |
|
155 | 155 | } |
|
156 | 156 | defs.update(custom) |
|
157 | 157 | |
|
158 | 158 | return defs |
|
159 | 159 | |
|
160 | 160 | def _get_user_create_params(self, name, **custom): |
|
161 | 161 | defs = { |
|
162 | 162 | 'username': name, |
|
163 | 163 | 'password': 'qweqwe', |
|
164 | 164 | 'email': '%s+test@rhodecode.org' % name, |
|
165 | 165 | 'firstname': 'TestUser', |
|
166 | 166 | 'lastname': 'Test', |
|
167 | 167 | 'active': True, |
|
168 | 168 | 'admin': False, |
|
169 | 169 | 'extern_type': 'rhodecode', |
|
170 | 170 | 'extern_name': None, |
|
171 | 171 | } |
|
172 | 172 | defs.update(custom) |
|
173 | 173 | |
|
174 | 174 | return defs |
|
175 | 175 | |
|
176 | 176 | def _get_user_group_create_params(self, name, **custom): |
|
177 | 177 | defs = { |
|
178 | 178 | 'users_group_name': name, |
|
179 | 179 | 'user_group_description': 'DESC', |
|
180 | 180 | 'users_group_active': True, |
|
181 | 181 | 'user_group_data': {}, |
|
182 | 182 | } |
|
183 | 183 | defs.update(custom) |
|
184 | 184 | |
|
185 | 185 | return defs |
|
186 | 186 | |
|
187 | 187 | def create_repo(self, name, **kwargs): |
|
188 | 188 | repo_group = kwargs.get('repo_group') |
|
189 | 189 | if isinstance(repo_group, RepoGroup): |
|
190 | 190 | kwargs['repo_group'] = repo_group.group_id |
|
191 | 191 | name = name.split(Repository.NAME_SEP)[-1] |
|
192 | 192 | name = Repository.NAME_SEP.join((repo_group.group_name, name)) |
|
193 | 193 | |
|
194 | 194 | if 'skip_if_exists' in kwargs: |
|
195 | 195 | del kwargs['skip_if_exists'] |
|
196 | 196 | r = Repository.get_by_repo_name(name) |
|
197 | 197 | if r: |
|
198 | 198 | return r |
|
199 | 199 | |
|
200 | 200 | form_data = self._get_repo_create_params(repo_name=name, **kwargs) |
|
201 | 201 | cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
202 | 202 | RepoModel().create(form_data, cur_user) |
|
203 | 203 | Session().commit() |
|
204 | 204 | repo = Repository.get_by_repo_name(name) |
|
205 | 205 | assert repo |
|
206 | 206 | return repo |
|
207 | 207 | |
|
208 | 208 | def create_fork(self, repo_to_fork, fork_name, **kwargs): |
|
209 | 209 | repo_to_fork = Repository.get_by_repo_name(repo_to_fork) |
|
210 | 210 | |
|
211 | 211 | form_data = self._get_repo_create_params(repo_name=fork_name, |
|
212 | 212 | fork_parent_id=repo_to_fork.repo_id, |
|
213 | 213 | repo_type=repo_to_fork.repo_type, |
|
214 | 214 | **kwargs) |
|
215 | 215 | #TODO: fix it !! |
|
216 | 216 | form_data['description'] = form_data['repo_description'] |
|
217 | 217 | form_data['private'] = form_data['repo_private'] |
|
218 | 218 | form_data['landing_rev'] = form_data['repo_landing_rev'] |
|
219 | 219 | |
|
220 | 220 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
221 | 221 | RepoModel().create_fork(form_data, cur_user=owner) |
|
222 | 222 | Session().commit() |
|
223 | 223 | r = Repository.get_by_repo_name(fork_name) |
|
224 | 224 | assert r |
|
225 | 225 | return r |
|
226 | 226 | |
|
227 | 227 | def destroy_repo(self, repo_name, **kwargs): |
|
228 | 228 | RepoModel().delete(repo_name, **kwargs) |
|
229 | 229 | Session().commit() |
|
230 | 230 | |
|
231 | 231 | def destroy_repo_on_filesystem(self, repo_name): |
|
232 | 232 | rm_path = os.path.join(RepoModel().repos_path, repo_name) |
|
233 | 233 | if os.path.isdir(rm_path): |
|
234 | 234 | shutil.rmtree(rm_path) |
|
235 | 235 | |
|
236 | 236 | def create_repo_group(self, name, **kwargs): |
|
237 | 237 | if 'skip_if_exists' in kwargs: |
|
238 | 238 | del kwargs['skip_if_exists'] |
|
239 | 239 | gr = RepoGroup.get_by_group_name(group_name=name) |
|
240 | 240 | if gr: |
|
241 | 241 | return gr |
|
242 | 242 | form_data = self._get_group_create_params(group_name=name, **kwargs) |
|
243 | 243 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
244 | 244 | gr = RepoGroupModel().create( |
|
245 | 245 | group_name=form_data['group_name'], |
|
246 | 246 | group_description=form_data['group_name'], |
|
247 | 247 | owner=owner) |
|
248 | 248 | Session().commit() |
|
249 | 249 | gr = RepoGroup.get_by_group_name(gr.group_name) |
|
250 | 250 | return gr |
|
251 | 251 | |
|
252 | 252 | def destroy_repo_group(self, repogroupid): |
|
253 | 253 | RepoGroupModel().delete(repogroupid) |
|
254 | 254 | Session().commit() |
|
255 | 255 | |
|
256 | 256 | def create_user(self, name, **kwargs): |
|
257 | 257 | if 'skip_if_exists' in kwargs: |
|
258 | 258 | del kwargs['skip_if_exists'] |
|
259 | 259 | user = User.get_by_username(name) |
|
260 | 260 | if user: |
|
261 | 261 | return user |
|
262 | 262 | form_data = self._get_user_create_params(name, **kwargs) |
|
263 | 263 | user = UserModel().create(form_data) |
|
264 | 264 | |
|
265 | 265 | # create token for user |
|
266 | 266 | AuthTokenModel().create( |
|
267 | 267 | user=user, description='TEST_USER_TOKEN') |
|
268 | 268 | |
|
269 | 269 | Session().commit() |
|
270 | 270 | user = User.get_by_username(user.username) |
|
271 | 271 | return user |
|
272 | 272 | |
|
273 | 273 | def destroy_user(self, userid): |
|
274 | 274 | UserModel().delete(userid) |
|
275 | 275 | Session().commit() |
|
276 | 276 | |
|
277 | 277 | def destroy_users(self, userid_iter): |
|
278 | 278 | for user_id in userid_iter: |
|
279 | 279 | if User.get_by_username(user_id): |
|
280 | 280 | UserModel().delete(user_id) |
|
281 | 281 | Session().commit() |
|
282 | 282 | |
|
283 | 283 | def create_user_group(self, name, **kwargs): |
|
284 | 284 | if 'skip_if_exists' in kwargs: |
|
285 | 285 | del kwargs['skip_if_exists'] |
|
286 | 286 | gr = UserGroup.get_by_group_name(group_name=name) |
|
287 | 287 | if gr: |
|
288 | 288 | return gr |
|
289 | # map active flag to the real attribute. For API consistency of fixtures | |
|
290 | if 'active' in kwargs: | |
|
291 | kwargs['users_group_active'] = kwargs['active'] | |
|
292 | del kwargs['active'] | |
|
289 | 293 | form_data = self._get_user_group_create_params(name, **kwargs) |
|
290 | 294 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
291 | 295 | user_group = UserGroupModel().create( |
|
292 | 296 | name=form_data['users_group_name'], |
|
293 | 297 | description=form_data['user_group_description'], |
|
294 | 298 | owner=owner, active=form_data['users_group_active'], |
|
295 | 299 | group_data=form_data['user_group_data']) |
|
296 | 300 | Session().commit() |
|
297 | 301 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) |
|
298 | 302 | return user_group |
|
299 | 303 | |
|
300 | 304 | def destroy_user_group(self, usergroupid): |
|
301 | 305 | UserGroupModel().delete(user_group=usergroupid, force=True) |
|
302 | 306 | Session().commit() |
|
303 | 307 | |
|
304 | 308 | def create_gist(self, **kwargs): |
|
305 | 309 | form_data = { |
|
306 | 310 | 'description': 'new-gist', |
|
307 | 311 | 'owner': TEST_USER_ADMIN_LOGIN, |
|
308 | 312 | 'gist_type': GistModel.cls.GIST_PUBLIC, |
|
309 | 313 | 'lifetime': -1, |
|
310 | 314 | 'acl_level': Gist.ACL_LEVEL_PUBLIC, |
|
311 | 315 | 'gist_mapping': {'filename1.txt': {'content': 'hello world'},} |
|
312 | 316 | } |
|
313 | 317 | form_data.update(kwargs) |
|
314 | 318 | gist = GistModel().create( |
|
315 | 319 | description=form_data['description'], owner=form_data['owner'], |
|
316 | 320 | gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], |
|
317 | 321 | lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] |
|
318 | 322 | ) |
|
319 | 323 | Session().commit() |
|
320 | 324 | return gist |
|
321 | 325 | |
|
322 | 326 | def destroy_gists(self, gistid=None): |
|
323 | 327 | for g in GistModel.cls.get_all(): |
|
324 | 328 | if gistid: |
|
325 | 329 | if gistid == g.gist_access_id: |
|
326 | 330 | GistModel().delete(g) |
|
327 | 331 | else: |
|
328 | 332 | GistModel().delete(g) |
|
329 | 333 | Session().commit() |
|
330 | 334 | |
|
331 | 335 | def load_resource(self, resource_name, strip=False): |
|
332 | 336 | with open(os.path.join(FIXTURES, resource_name)) as f: |
|
333 | 337 | source = f.read() |
|
334 | 338 | if strip: |
|
335 | 339 | source = source.strip() |
|
336 | 340 | |
|
337 | 341 | return source |
@@ -1,390 +1,319 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import json |
|
22 | 22 | |
|
23 | 23 | from mock import patch |
|
24 | 24 | import pytest |
|
25 | 25 | from pylons import tmpl_context as c |
|
26 | 26 | |
|
27 | 27 | import rhodecode |
|
28 | 28 | from rhodecode.lib.utils import map_groups |
|
29 | 29 | from rhodecode.model.db import Repository, User, RepoGroup |
|
30 | 30 | from rhodecode.model.meta import Session |
|
31 | 31 | from rhodecode.model.repo import RepoModel |
|
32 | 32 | from rhodecode.model.repo_group import RepoGroupModel |
|
33 | 33 | from rhodecode.model.settings import SettingsModel |
|
34 | 34 | from rhodecode.tests import TestController, url, TEST_USER_ADMIN_LOGIN |
|
35 | 35 | from rhodecode.tests.fixture import Fixture |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | fixture = Fixture() |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | class TestHomeController(TestController): |
|
42 | 42 | |
|
43 | 43 | def test_index(self): |
|
44 | 44 | self.log_user() |
|
45 | 45 | response = self.app.get(url(controller='home', action='index')) |
|
46 | 46 | # if global permission is set |
|
47 | 47 | response.mustcontain('Add Repository') |
|
48 | 48 | |
|
49 | 49 | # search for objects inside the JavaScript JSON |
|
50 | 50 | for repo in Repository.getAll(): |
|
51 | 51 | response.mustcontain('"name_raw": "%s"' % repo.repo_name) |
|
52 | 52 | |
|
53 | 53 | def test_index_contains_statics_with_ver(self): |
|
54 | 54 | self.log_user() |
|
55 | 55 | response = self.app.get(url(controller='home', action='index')) |
|
56 | 56 | |
|
57 | 57 | rhodecode_version_hash = c.rhodecode_version_hash |
|
58 | 58 | response.mustcontain('style.css?ver={0}'.format(rhodecode_version_hash)) |
|
59 | 59 | response.mustcontain('rhodecode-components.js?ver={0}'.format(rhodecode_version_hash)) |
|
60 | 60 | |
|
61 | 61 | def test_index_contains_backend_specific_details(self, backend): |
|
62 | 62 | self.log_user() |
|
63 | 63 | response = self.app.get(url(controller='home', action='index')) |
|
64 | 64 | tip = backend.repo.get_commit().raw_id |
|
65 | 65 | |
|
66 | 66 | # html in javascript variable: |
|
67 | 67 | response.mustcontain(r'<i class=\"icon-%s\"' % (backend.alias, )) |
|
68 | 68 | response.mustcontain(r'href=\"/%s\"' % (backend.repo_name, )) |
|
69 | 69 | |
|
70 | 70 | response.mustcontain("""/%s/changeset/%s""" % (backend.repo_name, tip)) |
|
71 | 71 | response.mustcontain("""Added a symlink""") |
|
72 | 72 | |
|
73 | 73 | def test_index_with_anonymous_access_disabled(self): |
|
74 | 74 | with fixture.anon_access(False): |
|
75 | 75 | response = self.app.get(url(controller='home', action='index'), |
|
76 | 76 | status=302) |
|
77 | 77 | assert 'login' in response.location |
|
78 | 78 | |
|
79 | 79 | def test_index_page_on_groups(self, autologin_user, repo_group): |
|
80 | 80 | response = self.app.get(url('repo_group_home', group_name='gr1')) |
|
81 | 81 | response.mustcontain("gr1/repo_in_group") |
|
82 | 82 | |
|
83 | 83 | def test_index_page_on_group_with_trailing_slash( |
|
84 | 84 | self, autologin_user, repo_group): |
|
85 | 85 | response = self.app.get(url('repo_group_home', group_name='gr1') + '/') |
|
86 | 86 | response.mustcontain("gr1/repo_in_group") |
|
87 | 87 | |
|
88 | 88 | @pytest.fixture(scope='class') |
|
89 | 89 | def repo_group(self, request): |
|
90 | 90 | gr = fixture.create_repo_group('gr1') |
|
91 | 91 | fixture.create_repo(name='gr1/repo_in_group', repo_group=gr) |
|
92 | 92 | |
|
93 | 93 | @request.addfinalizer |
|
94 | 94 | def cleanup(): |
|
95 | 95 | RepoModel().delete('gr1/repo_in_group') |
|
96 | 96 | RepoGroupModel().delete(repo_group='gr1', force_delete=True) |
|
97 | 97 | Session().commit() |
|
98 | 98 | |
|
99 | 99 | def test_index_with_name_with_tags(self, autologin_user): |
|
100 | 100 | user = User.get_by_username('test_admin') |
|
101 | 101 | user.name = '<img src="/image1" onload="alert(\'Hello, World!\');">' |
|
102 | 102 | user.lastname = ( |
|
103 | 103 | '<img src="/image2" onload="alert(\'Hello, World!\');">') |
|
104 | 104 | Session().add(user) |
|
105 | 105 | Session().commit() |
|
106 | 106 | |
|
107 | 107 | response = self.app.get(url(controller='home', action='index')) |
|
108 | 108 | response.mustcontain( |
|
109 | 109 | '<img src="/image1" onload="' |
|
110 | 110 | 'alert('Hello, World!');">') |
|
111 | 111 | response.mustcontain( |
|
112 | 112 | '<img src="/image2" onload="' |
|
113 | 113 | 'alert('Hello, World!');">') |
|
114 | 114 | |
|
115 | 115 | @pytest.mark.parametrize("name, state", [ |
|
116 | 116 | ('Disabled', False), |
|
117 | 117 | ('Enabled', True), |
|
118 | 118 | ]) |
|
119 | 119 | def test_index_show_version(self, autologin_user, name, state): |
|
120 | 120 | version_string = 'RhodeCode Enterprise %s' % rhodecode.__version__ |
|
121 | 121 | |
|
122 | 122 | sett = SettingsModel().create_or_update_setting( |
|
123 | 123 | 'show_version', state, 'bool') |
|
124 | 124 | Session().add(sett) |
|
125 | 125 | Session().commit() |
|
126 | 126 | SettingsModel().invalidate_settings_cache() |
|
127 | 127 | |
|
128 | 128 | response = self.app.get(url(controller='home', action='index')) |
|
129 | 129 | if state is True: |
|
130 | 130 | response.mustcontain(version_string) |
|
131 | 131 | if state is False: |
|
132 | 132 | response.mustcontain(no=[version_string]) |
|
133 | 133 | |
|
134 | 134 | |
|
135 | class TestUserAutocompleteData(TestController): | |
|
136 | def test_returns_list_of_users(self, user_util): | |
|
137 | self.log_user() | |
|
138 | user = user_util.create_user(is_active=True) | |
|
139 | user_name = user.username | |
|
140 | response = self.app.get( | |
|
141 | url(controller='home', action='user_autocomplete_data'), | |
|
142 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) | |
|
143 | result = json.loads(response.body) | |
|
144 | values = [suggestion['value'] for suggestion in result['suggestions']] | |
|
145 | assert user_name in values | |
|
146 | ||
|
147 | def test_returns_inactive_users_when_active_flag_sent(self, user_util): | |
|
148 | self.log_user() | |
|
149 | user = user_util.create_user(is_active=False) | |
|
150 | user_name = user.username | |
|
151 | response = self.app.get( | |
|
152 | url(controller='home', action='user_autocomplete_data', | |
|
153 | user_groups='true', active='0'), | |
|
154 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) | |
|
155 | result = json.loads(response.body) | |
|
156 | values = [suggestion['value'] for suggestion in result['suggestions']] | |
|
157 | assert user_name in values | |
|
158 | ||
|
159 | def test_returns_groups_when_user_groups_sent(self, user_util): | |
|
160 | self.log_user() | |
|
161 | group = user_util.create_user_group(user_groups_active=True) | |
|
162 | group_name = group.users_group_name | |
|
163 | response = self.app.get( | |
|
164 | url(controller='home', action='user_autocomplete_data', | |
|
165 | user_groups='true'), | |
|
166 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) | |
|
167 | result = json.loads(response.body) | |
|
168 | values = [suggestion['value'] for suggestion in result['suggestions']] | |
|
169 | assert group_name in values | |
|
170 | ||
|
171 | def test_result_is_limited_when_query_is_sent(self): | |
|
172 | self.log_user() | |
|
173 | fake_result = [ | |
|
174 | { | |
|
175 | 'first_name': 'John', | |
|
176 | 'value_display': 'hello{} (John Smith)'.format(i), | |
|
177 | 'icon_link': '/images/user14.png', | |
|
178 | 'value': 'hello{}'.format(i), | |
|
179 | 'last_name': 'Smith', | |
|
180 | 'username': 'hello{}'.format(i), | |
|
181 | 'id': i, | |
|
182 | 'value_type': u'user' | |
|
183 | } | |
|
184 | for i in range(10) | |
|
185 | ] | |
|
186 | users_patcher = patch.object( | |
|
187 | RepoModel, 'get_users', return_value=fake_result) | |
|
188 | groups_patcher = patch.object( | |
|
189 | RepoModel, 'get_user_groups', return_value=fake_result) | |
|
190 | ||
|
191 | query = 'hello' | |
|
192 | with users_patcher as users_mock, groups_patcher as groups_mock: | |
|
193 | response = self.app.get( | |
|
194 | url(controller='home', action='user_autocomplete_data', | |
|
195 | user_groups='true', query=query), | |
|
196 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) | |
|
197 | ||
|
198 | result = json.loads(response.body) | |
|
199 | users_mock.assert_called_once_with( | |
|
200 | name_contains=query, only_active=True) | |
|
201 | groups_mock.assert_called_once_with( | |
|
202 | name_contains=query, only_active=True) | |
|
203 | assert len(result['suggestions']) == 20 | |
|
204 | ||
|
205 | ||
|
206 | 135 | def assert_and_get_content(result): |
|
207 | 136 | repos = [] |
|
208 | 137 | groups = [] |
|
209 | 138 | commits = [] |
|
210 | 139 | for data in result: |
|
211 | 140 | for data_item in data['children']: |
|
212 | 141 | assert data_item['id'] |
|
213 | 142 | assert data_item['text'] |
|
214 | 143 | assert data_item['url'] |
|
215 | 144 | if data_item['type'] == 'repo': |
|
216 | 145 | repos.append(data_item) |
|
217 | 146 | elif data_item['type'] == 'group': |
|
218 | 147 | groups.append(data_item) |
|
219 | 148 | elif data_item['type'] == 'commit': |
|
220 | 149 | commits.append(data_item) |
|
221 | 150 | else: |
|
222 | 151 | raise Exception('invalid type %s' % data_item['type']) |
|
223 | 152 | |
|
224 | 153 | return repos, groups, commits |
|
225 | 154 | |
|
226 | 155 | |
|
227 | 156 | class TestGotoSwitcherData(TestController): |
|
228 | 157 | required_repos_with_groups = [ |
|
229 | 158 | 'abc', |
|
230 | 159 | 'abc-fork', |
|
231 | 160 | 'forks/abcd', |
|
232 | 161 | 'abcd', |
|
233 | 162 | 'abcde', |
|
234 | 163 | 'a/abc', |
|
235 | 164 | 'aa/abc', |
|
236 | 165 | 'aaa/abc', |
|
237 | 166 | 'aaaa/abc', |
|
238 | 167 | 'repos_abc/aaa/abc', |
|
239 | 168 | 'abc_repos/abc', |
|
240 | 169 | 'abc_repos/abcd', |
|
241 | 170 | 'xxx/xyz', |
|
242 | 171 | 'forked-abc/a/abc' |
|
243 | 172 | ] |
|
244 | 173 | |
|
245 | 174 | @pytest.fixture(autouse=True, scope='class') |
|
246 | 175 | def prepare(self, request, pylonsapp): |
|
247 | 176 | for repo_and_group in self.required_repos_with_groups: |
|
248 | 177 | # create structure of groups and return the last group |
|
249 | 178 | |
|
250 | 179 | repo_group = map_groups(repo_and_group) |
|
251 | 180 | |
|
252 | 181 | RepoModel()._create_repo( |
|
253 | 182 | repo_and_group, 'hg', 'test-ac', TEST_USER_ADMIN_LOGIN, |
|
254 | 183 | repo_group=getattr(repo_group, 'group_id', None)) |
|
255 | 184 | |
|
256 | 185 | Session().commit() |
|
257 | 186 | |
|
258 | 187 | request.addfinalizer(self.cleanup) |
|
259 | 188 | |
|
260 | 189 | def cleanup(self): |
|
261 | 190 | # first delete all repos |
|
262 | 191 | for repo_and_groups in self.required_repos_with_groups: |
|
263 | 192 | repo = Repository.get_by_repo_name(repo_and_groups) |
|
264 | 193 | if repo: |
|
265 | 194 | RepoModel().delete(repo) |
|
266 | 195 | Session().commit() |
|
267 | 196 | |
|
268 | 197 | # then delete all empty groups |
|
269 | 198 | for repo_and_groups in self.required_repos_with_groups: |
|
270 | 199 | if '/' in repo_and_groups: |
|
271 | 200 | r_group = repo_and_groups.rsplit('/', 1)[0] |
|
272 | 201 | repo_group = RepoGroup.get_by_group_name(r_group) |
|
273 | 202 | if not repo_group: |
|
274 | 203 | continue |
|
275 | 204 | parents = repo_group.parents |
|
276 | 205 | RepoGroupModel().delete(repo_group, force_delete=True) |
|
277 | 206 | Session().commit() |
|
278 | 207 | |
|
279 | 208 | for el in reversed(parents): |
|
280 | 209 | RepoGroupModel().delete(el, force_delete=True) |
|
281 | 210 | Session().commit() |
|
282 | 211 | |
|
283 | 212 | def test_returns_list_of_repos_and_groups(self): |
|
284 | 213 | self.log_user() |
|
285 | 214 | |
|
286 | 215 | response = self.app.get( |
|
287 | 216 | url(controller='home', action='goto_switcher_data'), |
|
288 | 217 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) |
|
289 | 218 | result = json.loads(response.body)['results'] |
|
290 | 219 | |
|
291 | 220 | repos, groups, commits = assert_and_get_content(result) |
|
292 | 221 | |
|
293 | 222 | assert len(repos) == len(Repository.get_all()) |
|
294 | 223 | assert len(groups) == len(RepoGroup.get_all()) |
|
295 | 224 | assert len(commits) == 0 |
|
296 | 225 | |
|
297 | 226 | def test_returns_list_of_repos_and_groups_filtered(self): |
|
298 | 227 | self.log_user() |
|
299 | 228 | |
|
300 | 229 | response = self.app.get( |
|
301 | 230 | url(controller='home', action='goto_switcher_data'), |
|
302 | 231 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, |
|
303 | 232 | params={'query': 'abc'}, status=200) |
|
304 | 233 | result = json.loads(response.body)['results'] |
|
305 | 234 | |
|
306 | 235 | repos, groups, commits = assert_and_get_content(result) |
|
307 | 236 | |
|
308 | 237 | assert len(repos) == 13 |
|
309 | 238 | assert len(groups) == 5 |
|
310 | 239 | assert len(commits) == 0 |
|
311 | 240 | |
|
312 | 241 | def test_returns_list_of_properly_sorted_and_filtered(self): |
|
313 | 242 | self.log_user() |
|
314 | 243 | |
|
315 | 244 | response = self.app.get( |
|
316 | 245 | url(controller='home', action='goto_switcher_data'), |
|
317 | 246 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, |
|
318 | 247 | params={'query': 'abc'}, status=200) |
|
319 | 248 | result = json.loads(response.body)['results'] |
|
320 | 249 | |
|
321 | 250 | repos, groups, commits = assert_and_get_content(result) |
|
322 | 251 | |
|
323 | 252 | test_repos = [x['text'] for x in repos[:4]] |
|
324 | 253 | assert ['abc', 'abcd', 'a/abc', 'abcde'] == test_repos |
|
325 | 254 | |
|
326 | 255 | test_groups = [x['text'] for x in groups[:4]] |
|
327 | 256 | assert ['abc_repos', 'repos_abc', |
|
328 | 257 | 'forked-abc', 'forked-abc/a'] == test_groups |
|
329 | 258 | |
|
330 | 259 | |
|
331 | 260 | class TestRepoListData(TestController): |
|
332 | 261 | def test_returns_list_of_repos_and_groups(self, user_util): |
|
333 | 262 | self.log_user() |
|
334 | 263 | |
|
335 | 264 | response = self.app.get( |
|
336 | 265 | url(controller='home', action='repo_list_data'), |
|
337 | 266 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) |
|
338 | 267 | result = json.loads(response.body)['results'] |
|
339 | 268 | |
|
340 | 269 | repos, groups, commits = assert_and_get_content(result) |
|
341 | 270 | |
|
342 | 271 | assert len(repos) == len(Repository.get_all()) |
|
343 | 272 | assert len(groups) == 0 |
|
344 | 273 | assert len(commits) == 0 |
|
345 | 274 | |
|
346 | 275 | def test_returns_list_of_repos_and_groups_filtered(self): |
|
347 | 276 | self.log_user() |
|
348 | 277 | |
|
349 | 278 | response = self.app.get( |
|
350 | 279 | url(controller='home', action='repo_list_data'), |
|
351 | 280 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, |
|
352 | 281 | params={'query': 'vcs_test_git'}, status=200) |
|
353 | 282 | result = json.loads(response.body)['results'] |
|
354 | 283 | |
|
355 | 284 | repos, groups, commits = assert_and_get_content(result) |
|
356 | 285 | |
|
357 | 286 | assert len(repos) == len(Repository.query().filter( |
|
358 | 287 | Repository.repo_name.ilike('%vcs_test_git%')).all()) |
|
359 | 288 | assert len(groups) == 0 |
|
360 | 289 | assert len(commits) == 0 |
|
361 | 290 | |
|
362 | 291 | def test_returns_list_of_repos_and_groups_filtered_with_type(self): |
|
363 | 292 | self.log_user() |
|
364 | 293 | |
|
365 | 294 | response = self.app.get( |
|
366 | 295 | url(controller='home', action='repo_list_data'), |
|
367 | 296 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, |
|
368 | 297 | params={'query': 'vcs_test_git', 'repo_type': 'git'}, status=200) |
|
369 | 298 | result = json.loads(response.body)['results'] |
|
370 | 299 | |
|
371 | 300 | repos, groups, commits = assert_and_get_content(result) |
|
372 | 301 | |
|
373 | 302 | assert len(repos) == len(Repository.query().filter( |
|
374 | 303 | Repository.repo_name.ilike('%vcs_test_git%')).all()) |
|
375 | 304 | assert len(groups) == 0 |
|
376 | 305 | assert len(commits) == 0 |
|
377 | 306 | |
|
378 | 307 | def test_returns_list_of_repos_non_ascii_query(self): |
|
379 | 308 | self.log_user() |
|
380 | 309 | response = self.app.get( |
|
381 | 310 | url(controller='home', action='repo_list_data'), |
|
382 | 311 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, |
|
383 | 312 | params={'query': 'ć_vcs_test_ą', 'repo_type': 'git'}, status=200) |
|
384 | 313 | result = json.loads(response.body)['results'] |
|
385 | 314 | |
|
386 | 315 | repos, groups, commits = assert_and_get_content(result) |
|
387 | 316 | |
|
388 | 317 | assert len(repos) == 0 |
|
389 | 318 | assert len(groups) == 0 |
|
390 | 319 | assert len(commits) == 0 |