Merge branch default into stable
marcink
r168:40e6b177 merge stable

The requested changes are too big and content was truncated.

@@ -0,0 +1,55 b''
1 |RCE| 4.1.0 |RNS|
2 -----------------
3
4 Release Date
5 ^^^^^^^^^^^^
6
7 - 2016-06-XX
8
9 General
10 ^^^^^^^
11
12 - Migrated more views to Pyramid. These now include login, social plugins, and search
13 - Started implementing the Pyramid events system as a replacement for rcextensions callbacks
14 - JS routes assets are now generated in development mode automatically
15 - ini: Added a fallback authentication plugin setting. If only one
16   authentication backend is enabled, users can now enable fallback auth so
17   they can still log in when external servers are down (see the ini snippet after this section)
18 - Bumped Mercurial to 3.8.3 version
19 - Bumped RhodeCode Tools to 0.8.3 version
20
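For convenience, the ini change that introduces the fallback setting mentioned above appears later in this merge (development.ini and production.ini); quoted here with the comment wording cleaned up:

    ## Fallback authentication plugin. Set this to a plugin ID to force the usage
    ## of an authentication plugin even if it is disabled by its settings.
    #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
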
21 New Features
22 ^^^^^^^^^^^^
23
24 - search: add syntax highlighting, line numbers and line context to file
25 content search results
26 - Go To switcher now searches commit hashes as well
27 - Token based authentication is now in CE edition as well
28 - User groups: added an autocomplete widget for selecting members of
29   another group as part of the current group.
30
31 Security
32 ^^^^^^^^
33
34 - Added new action loggers for actions like adding/revoking permissions.
35 - permissions: show origin of permissions in permissions summary. Allows users
36 to see where and how permissions are inherited
37
38 Performance
39 ^^^^^^^^^^^
40
41
42
43 Fixes
44 ^^^^^
45
46 - api: gracefully handle errors on repos that are damaged or missing
47 from filesystem.
48 - logging: log the original error when a merge failure occurs
49 - #3965 Cannot change the owner of a user's group by using the API
50 - database is now initialized inside pyramid context
51 - fixed an incorrect check in the LDAP plugin for a missing LDAP server
52 - Brought back multi-threaded gunicorn workers for backward compatibility with
53   previous RhodeCode versions
54 - Commit dates are now properly handled as UTC. This fixes some issues
55   with displaying the age of commits No newline at end of file
@@ -0,0 +1,225 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import colander
22 import logging
23
24 from sqlalchemy.ext.hybrid import hybrid_property
25
26 from rhodecode.authentication.base import RhodeCodeExternalAuthPlugin
27 from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
28 from rhodecode.authentication.routes import AuthnPluginResourceBase
29 from rhodecode.lib.colander_utils import strip_whitespace
30 from rhodecode.lib.utils2 import str2bool, safe_unicode
31 from rhodecode.model.db import User
32 from rhodecode.translation import _
33
34
35 log = logging.getLogger(__name__)
36
37
38 def plugin_factory(plugin_id, *args, **kwds):
39 """
40 Factory function that is called during plugin discovery.
41 It returns the plugin instance.
42 """
43 plugin = RhodeCodeAuthPlugin(plugin_id)
44 return plugin
45
46
47 class HeadersAuthnResource(AuthnPluginResourceBase):
48 pass
49
50
51 class HeadersSettingsSchema(AuthnPluginSettingsSchemaBase):
52 header = colander.SchemaNode(
53 colander.String(),
54 default='REMOTE_USER',
55 description=_('Header to extract the user from'),
56 preparer=strip_whitespace,
57 title=_('Header'),
58 widget='string')
59 fallback_header = colander.SchemaNode(
60 colander.String(),
61 default='HTTP_X_FORWARDED_USER',
62 description=_('Header to extract the user from when main one fails'),
63 preparer=strip_whitespace,
64 title=_('Fallback header'),
65 widget='string')
66 clean_username = colander.SchemaNode(
67 colander.Boolean(),
68 default=True,
69 description=_('Perform cleaning of user, if passed user has @ in '
70 'username then first part before @ is taken. '
71 'If there\'s \\ in the username only the part after '
72 ' \\ is taken'),
73 missing=False,
74 title=_('Clean username'),
75 widget='bool')
76
77
78 class RhodeCodeAuthPlugin(RhodeCodeExternalAuthPlugin):
79
80 def includeme(self, config):
81 config.add_authn_plugin(self)
82 config.add_authn_resource(self.get_id(), HeadersAuthnResource(self))
83 config.add_view(
84 'rhodecode.authentication.views.AuthnPluginViewBase',
85 attr='settings_get',
86 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
87 request_method='GET',
88 route_name='auth_home',
89 context=HeadersAuthnResource)
90 config.add_view(
91 'rhodecode.authentication.views.AuthnPluginViewBase',
92 attr='settings_post',
93 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
94 request_method='POST',
95 route_name='auth_home',
96 context=HeadersAuthnResource)
97
98 def get_display_name(self):
99 return _('Headers')
100
101 def get_settings_schema(self):
102 return HeadersSettingsSchema()
103
104 @hybrid_property
105 def name(self):
106 return 'headers'
107
108 @property
109 def is_headers_auth(self):
110 return True
111
112 def use_fake_password(self):
113 return True
114
115 def user_activation_state(self):
116 def_user_perms = User.get_default_user().AuthUser.permissions['global']
117 return 'hg.extern_activate.auto' in def_user_perms
118
119 def _clean_username(self, username):
120 # Removing realm and domain from username
121 username = username.split('@')[0]
122 username = username.rsplit('\\')[-1]
123 return username
124
125 def _get_username(self, environ, settings):
126 username = None
127 environ = environ or {}
128 if not environ:
129 log.debug('got empty environ: %s' % environ)
130
131 settings = settings or {}
132 if settings.get('header'):
133 header = settings.get('header')
134 username = environ.get(header)
135 log.debug('extracted %s:%s' % (header, username))
136
137 # fallback mode
138 if not username and settings.get('fallback_header'):
139 header = settings.get('fallback_header')
140 username = environ.get(header)
141 log.debug('extracted %s:%s' % (header, username))
142
143 if username and str2bool(settings.get('clean_username')):
144 log.debug('Received username `%s` from headers' % username)
145 username = self._clean_username(username)
146 log.debug('New cleanup user is:%s' % username)
147 return username
148
149 def get_user(self, username=None, **kwargs):
150 """
151 Helper method for fetching users in plugins. By default it performs a
152 simple fetch by username, but this method can be customized in plugins,
153 e.g. the headers auth plugin fetches the user from environ params
154 :param username: username if given to fetch
155 :param kwargs: extra arguments needed for user fetching.
156 """
157 environ = kwargs.get('environ') or {}
158 settings = kwargs.get('settings') or {}
159 username = self._get_username(environ, settings)
160 # we got the username, so use default method now
161 return super(RhodeCodeAuthPlugin, self).get_user(username)
162
163 def auth(self, userobj, username, password, settings, **kwargs):
164 """
165 Gets the headers_auth username (or email). It tries to get the username
166 from REMOTE_USER if this plugin is enabled; if that fails,
167 it tries to get the username from HTTP_X_FORWARDED_USER if the fallback
168 header is set. clean_username extracts the username from this data if it
169 contains @.
170 Return None on failure. On success, return a dictionary of the form:
171
172 see: RhodeCodeAuthPluginBase.auth_func_attrs
173
174 :param userobj:
175 :param username:
176 :param password:
177 :param settings:
178 :param kwargs:
179 """
180 environ = kwargs.get('environ')
181 if not environ:
182 log.debug('Empty environ data skipping...')
183 return None
184
185 if not userobj:
186 userobj = self.get_user('', environ=environ, settings=settings)
187
188 # we don't care about the passed username/password for headers auth;
189 # the only way to log in is via environ
190 username = None
191 if userobj:
192 username = getattr(userobj, 'username')
193
194 if not username:
195 # we don't have a user object in the DB (user doesn't exist); extract
196 # the username from environ based on the settings
197 username = self._get_username(environ, settings)
198
199 # if cannot fetch username, it's a no-go for this plugin to proceed
200 if not username:
201 return None
202
203 # old attrs fetched from RhodeCode database
204 admin = getattr(userobj, 'admin', False)
205 active = getattr(userobj, 'active', True)
206 email = getattr(userobj, 'email', '')
207 firstname = getattr(userobj, 'firstname', '')
208 lastname = getattr(userobj, 'lastname', '')
209 extern_type = getattr(userobj, 'extern_type', '')
210
211 user_attrs = {
212 'username': username,
213 'firstname': safe_unicode(firstname or username),
214 'lastname': safe_unicode(lastname or ''),
215 'groups': [],
216 'email': email or '',
217 'admin': admin or False,
218 'active': active,
219 'active_from_extern': True,
220 'extern_name': username,
221 'extern_type': extern_type,
222 }
223
224 log.info('user `%s` authenticated correctly' % user_attrs['username'])
225 return user_attrs
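An illustrative sketch (not part of the commit) of how the plugin above resolves a username from a WSGI environ, following _get_username() and _clean_username(); the settings keys mirror HeadersSettingsSchema and the environ values are invented:

    settings = {
        'header': 'REMOTE_USER',
        'fallback_header': 'HTTP_X_FORWARDED_USER',
        'clean_username': True,
    }
    environ = {'HTTP_X_FORWARDED_USER': 'CORP\\jdoe@example.com'}

    # main header first, then the fallback header
    username = environ.get(settings['header']) or environ.get(settings['fallback_header'])
    if username and settings['clean_username']:
        username = username.split('@')[0]     # drop the realm  -> 'CORP\jdoe'
        username = username.rsplit('\\')[-1]  # drop the domain -> 'jdoe'
    assert username == 'jdoe'
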
@@ -0,0 +1,136 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 """
22 RhodeCode authentication token plugin for built in internal auth
23 """
24
25 import logging
26
27 from sqlalchemy.ext.hybrid import hybrid_property
28
29 from rhodecode.translation import _
30 from rhodecode.authentication.base import RhodeCodeAuthPluginBase, VCS_TYPE
31 from rhodecode.authentication.routes import AuthnPluginResourceBase
32 from rhodecode.model.db import User, UserApiKeys
33
34
35 log = logging.getLogger(__name__)
36
37
38 def plugin_factory(plugin_id, *args, **kwds):
39 plugin = RhodeCodeAuthPlugin(plugin_id)
40 return plugin
41
42
43 class RhodecodeAuthnResource(AuthnPluginResourceBase):
44 pass
45
46
47 class RhodeCodeAuthPlugin(RhodeCodeAuthPluginBase):
48 """
49 Enables usage of authentication tokens for vcs operations.
50 """
51
52 def includeme(self, config):
53 config.add_authn_plugin(self)
54 config.add_authn_resource(self.get_id(), RhodecodeAuthnResource(self))
55 config.add_view(
56 'rhodecode.authentication.views.AuthnPluginViewBase',
57 attr='settings_get',
58 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
59 request_method='GET',
60 route_name='auth_home',
61 context=RhodecodeAuthnResource)
62 config.add_view(
63 'rhodecode.authentication.views.AuthnPluginViewBase',
64 attr='settings_post',
65 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
66 request_method='POST',
67 route_name='auth_home',
68 context=RhodecodeAuthnResource)
69
70 def get_display_name(self):
71 return _('Rhodecode Token Auth')
72
73 @hybrid_property
74 def name(self):
75 return "authtoken"
76
77 def user_activation_state(self):
78 def_user_perms = User.get_default_user().AuthUser.permissions['global']
79 return 'hg.register.auto_activate' in def_user_perms
80
81 def allows_authentication_from(
82 self, user, allows_non_existing_user=True,
83 allowed_auth_plugins=None, allowed_auth_sources=None):
84 """
85 Custom method for this auth plugin that doesn't accept empty users, and
86 only allows the rhodecode and authtoken extern_type to authenticate with
87 it, and only via VCS operations
88 """
89 # only this and rhodecode plugins can use this type
90 from rhodecode.authentication.plugins import auth_rhodecode
91 allowed_auth_plugins = [
92 self.name, auth_rhodecode.RhodeCodeAuthPlugin.name]
93 # only for vcs operations
94 allowed_auth_sources = [VCS_TYPE]
95
96 return super(RhodeCodeAuthPlugin, self).allows_authentication_from(
97 user, allows_non_existing_user=False,
98 allowed_auth_plugins=allowed_auth_plugins,
99 allowed_auth_sources=allowed_auth_sources)
100
101 def auth(self, userobj, username, password, settings, **kwargs):
102 if not userobj:
103 log.debug('userobj was:%s skipping' % (userobj, ))
104 return None
105
106 user_attrs = {
107 "username": userobj.username,
108 "firstname": userobj.firstname,
109 "lastname": userobj.lastname,
110 "groups": [],
111 "email": userobj.email,
112 "admin": userobj.admin,
113 "active": userobj.active,
114 "active_from_extern": userobj.active,
115 "extern_name": userobj.user_id,
116 "extern_type": userobj.extern_type,
117 }
118
119 log.debug('Authenticating user with args %s', user_attrs)
120 if userobj.active:
121 role = UserApiKeys.ROLE_VCS
122 active_tokens = [x.api_key for x in
123 User.extra_valid_auth_tokens(userobj, role=role)]
124 if userobj.username == username and password in active_tokens:
125 log.info(
126 'user `%s` successfully authenticated via %s',
127 user_attrs['username'], self.name)
128 return user_attrs
129 log.error(
130 'user `%s` failed to authenticate via %s, reason: bad or '
131 'inactive token.', username, self.name)
132 else:
133 log.warning(
134 'user `%s` failed to authenticate via %s, reason: account not '
135 'active.', username, self.name)
136 return None
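For context (not part of the commit): auth() above only succeeds when the supplied password matches one of the user's active auth tokens carrying the VCS role, so VCS clients authenticate with a token in place of the account password. A minimal sketch of that check, with a hypothetical user and token list:

    # hypothetical data standing in for userobj and User.extra_valid_auth_tokens()
    active_vcs_tokens = ['deadbeefcafe0123']  # hypothetical tokens with UserApiKeys.ROLE_VCS

    def token_auth_ok(userobj_username, username, password):
        # the "password" sent by the VCS client must be an active VCS-role token
        return userobj_username == username and password in active_vcs_tokens

    assert token_auth_ok('jdoe', 'jdoe', 'deadbeefcafe0123')

In practice the token is simply supplied wherever the VCS client expects a password, for example in an HTTPS clone URL or a credentials prompt.
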
@@ -0,0 +1,42 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 def generate_jsroutes_content(jsroutes):
22 statements = []
23 for url_name, url, fields in jsroutes:
24 statements.append(
25 "pyroutes.register('%s', '%s', %s);" % (url_name, url, fields))
26 return u'''
27 /******************************************************************************
28 * *
29 * DO NOT CHANGE THIS FILE MANUALLY *
30 * *
31 * *
32 * This file is automatically generated when the app starts up. *
33 * *
34 * To add a route here pass jsroute=True to the route definition in the app *
35 * *
36 ******************************************************************************/
37 function registerRCRoutes() {
38 // routes registration
39 %s
40 }
41 ''' % '\n '.join(statements)
42
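An illustrative sketch (not part of the commit) of feeding generate_jsroutes_content() above a couple of hypothetical routes; each entry is (url_name, url_pattern, required_fields):

    jsroutes = [
        ('home', '/', []),
        ('user_profile', '/_profiles/%(username)s', ['username']),
    ]
    print(generate_jsroutes_content(jsroutes))
    # the generated registerRCRoutes() body then contains lines such as:
    #   pyroutes.register('home', '/', []);
    #   pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
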
NO CONTENT: eleven further new files (ten with mode 100644, one with mode 100755) whose content was truncated in this view.
@@ -1,6 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.0.1
2 current_version = 4.1.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:rhodecode/VERSION]
6 6
@@ -1,137 +1,138 b''
1 1 module.exports = function(grunt) {
2 2 grunt.initConfig({
3 3
4 4 dirs: {
5 5 css: "rhodecode/public/css",
6 6 js: {
7 7 "src": "rhodecode/public/js/src",
8 8 "dest": "rhodecode/public/js"
9 9 }
10 10 },
11 11
12 12 concat: {
13 13 dist: {
14 14 src: [
15 15 // Base libraries
16 16 '<%= dirs.js.src %>/jquery-1.11.1.min.js',
17 17 '<%= dirs.js.src %>/logging.js',
18 18 '<%= dirs.js.src %>/bootstrap.js',
19 19 '<%= dirs.js.src %>/mousetrap.js',
20 20 '<%= dirs.js.src %>/moment.js',
21 21 '<%= dirs.js.src %>/appenlight-client-0.4.1.min.js',
22 22
23 23 // Plugins
24 24 '<%= dirs.js.src %>/plugins/jquery.pjax.js',
25 25 '<%= dirs.js.src %>/plugins/jquery.dataTables.js',
26 26 '<%= dirs.js.src %>/plugins/flavoured_checkbox.js',
27 27 '<%= dirs.js.src %>/plugins/jquery.auto-grow-input.js',
28 28 '<%= dirs.js.src %>/plugins/jquery.autocomplete.js',
29 29 '<%= dirs.js.src %>/plugins/jquery.debounce.js',
30 '<%= dirs.js.src %>/plugins/jquery.mark.js',
30 31 '<%= dirs.js.src %>/plugins/jquery.timeago.js',
31 32 '<%= dirs.js.src %>/plugins/jquery.timeago-extension.js',
32 33
33 34 // Select2
34 35 '<%= dirs.js.src %>/select2/select2.js',
35
36
36 37 // Code-mirror
37 38 '<%= dirs.js.src %>/codemirror/codemirror.js',
38 39 '<%= dirs.js.src %>/codemirror/codemirror_loadmode.js',
39 40 '<%= dirs.js.src %>/codemirror/codemirror_hint.js',
40 41 '<%= dirs.js.src %>/codemirror/codemirror_overlay.js',
41 42 '<%= dirs.js.src %>/codemirror/codemirror_placeholder.js',
42 43 // TODO: mikhail: this is an exception. Since the code mirror modes
43 44 // are loaded "on the fly", we need to keep them in a public folder
44 45 '<%= dirs.js.dest %>/mode/meta.js',
45 46 '<%= dirs.js.dest %>/mode/meta_ext.js',
46 47 '<%= dirs.js.dest %>/rhodecode/i18n/select2/translations.js',
47 48
48 49 // Rhodecode utilities
49 50 '<%= dirs.js.src %>/rhodecode/utils/array.js',
50 51 '<%= dirs.js.src %>/rhodecode/utils/string.js',
51 52 '<%= dirs.js.src %>/rhodecode/utils/pyroutes.js',
52 53 '<%= dirs.js.src %>/rhodecode/utils/ajax.js',
53 54 '<%= dirs.js.src %>/rhodecode/utils/autocomplete.js',
54 55 '<%= dirs.js.src %>/rhodecode/utils/colorgenerator.js',
55 56 '<%= dirs.js.src %>/rhodecode/utils/ie.js',
56 57 '<%= dirs.js.src %>/rhodecode/utils/os.js',
57 58
58 59 // Rhodecode widgets
59 60 '<%= dirs.js.src %>/rhodecode/widgets/multiselect.js',
60 61
61 62 // Rhodecode components
62 '<%= dirs.js.src %>/rhodecode/pyroutes.js',
63 '<%= dirs.js.src %>/rhodecode/init.js',
63 64 '<%= dirs.js.src %>/rhodecode/codemirror.js',
64 65 '<%= dirs.js.src %>/rhodecode/comments.js',
65 66 '<%= dirs.js.src %>/rhodecode/constants.js',
66 67 '<%= dirs.js.src %>/rhodecode/files.js',
67 68 '<%= dirs.js.src %>/rhodecode/followers.js',
68 69 '<%= dirs.js.src %>/rhodecode/menus.js',
69 70 '<%= dirs.js.src %>/rhodecode/notifications.js',
70 71 '<%= dirs.js.src %>/rhodecode/permissions.js',
71 72 '<%= dirs.js.src %>/rhodecode/pjax.js',
72 73 '<%= dirs.js.src %>/rhodecode/pullrequests.js',
73 74 '<%= dirs.js.src %>/rhodecode/settings.js',
74 75 '<%= dirs.js.src %>/rhodecode/select2_widgets.js',
75 76 '<%= dirs.js.src %>/rhodecode/tooltips.js',
76 77 '<%= dirs.js.src %>/rhodecode/users.js',
77 78 '<%= dirs.js.src %>/rhodecode/appenlight.js',
78 79
79 80 // Rhodecode main module
80 81 '<%= dirs.js.src %>/rhodecode.js'
81 82 ],
82 83 dest: '<%= dirs.js.dest %>/scripts.js',
83 84 nonull: true
84 85 }
85 86 },
86 87
87 88 less: {
88 89 development: {
89 90 options: {
90 91 compress: false,
91 92 yuicompress: false,
92 93 optimization: 0
93 94 },
94 95 files: {
95 96 "<%= dirs.css %>/style.css": "<%= dirs.css %>/main.less"
96 97 }
97 98 },
98 99 production: {
99 100 options: {
100 101 compress: true,
101 102 yuicompress: true,
102 103 optimization: 2
103 104 },
104 105 files: {
105 106 "<%= dirs.css %>/style.css": "<%= dirs.css %>/main.less"
106 107 }
107 108 }
108 109 },
109 110
110 111 watch: {
111 112 less: {
112 113 files: ["<%= dirs.css %>/*.less"],
113 114 tasks: ["less:production"]
114 115 },
115 116 js: {
116 117 files: ["<%= dirs.js.src %>/**/*.js"],
117 118 tasks: ["concat:dist"]
118 119 }
119 120 },
120 121
121 122 jshint: {
122 123 rhodecode: {
123 124 src: '<%= dirs.js.src %>/rhodecode/**/*.js',
124 125 options: {
125 126 jshintrc: '.jshintrc'
126 127 }
127 128 }
128 129 }
129 130 });
130 131
131 132 grunt.loadNpmTasks('grunt-contrib-less');
132 133 grunt.loadNpmTasks('grunt-contrib-concat');
133 134 grunt.loadNpmTasks('grunt-contrib-watch');
134 135 grunt.loadNpmTasks('grunt-contrib-jshint');
135 136
136 137 grunt.registerTask('default', ['less:production', 'concat:dist']);
137 138 };
@@ -1,577 +1,608 b''
1 1 ################################################################################
2 2 ################################################################################
3 3 # RhodeCode Enterprise - configuration file #
4 4 # Built-in functions and variables #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 # #
7 7 ################################################################################
8 8
9 9 [DEFAULT]
10 10 debug = true
11 11 pdebug = false
12 12 ################################################################################
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17 #email_to = admin@localhost
18 18 #error_email_from = paste_error@localhost
19 19 #app_email_from = rhodecode-noreply@localhost
20 20 #error_message =
21 21 #email_prefix = [RhodeCode]
22 22
23 23 #smtp_server = mail.server.com
24 24 #smtp_username =
25 25 #smtp_password =
26 26 #smtp_port =
27 27 #smtp_use_tls = false
28 28 #smtp_use_ssl = true
29 29 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
30 30 #smtp_auth =
31 31
32 32 [server:main]
33 33 ## COMMON ##
34 34 host = 127.0.0.1
35 35 port = 5000
36 36
37 ##########################
38 ## WAITRESS WSGI SERVER ##
39 ##########################
37 ##################################
38 ## WAITRESS WSGI SERVER ##
39 ## Recommended for Development ##
40 ##################################
40 41 use = egg:waitress#main
41 42 ## number of worker threads
42 43 threads = 5
43 44 ## MAX BODY SIZE 100GB
44 45 max_request_body_size = 107374182400
45 46 ## Use poll instead of select, fixes file descriptors limits problems.
46 47 ## May not work on old windows systems.
47 48 asyncore_use_poll = true
48 49
49 50
50 51 ##########################
51 52 ## GUNICORN WSGI SERVER ##
52 53 ##########################
53 54 ## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini>
54 55 #use = egg:gunicorn#main
55 56 ## Sets the number of process workers. You must set `instance_id = *`
56 57 ## when this option is set to more than one worker, recommended
57 58 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
58 59 ## The `instance_id = *` must be set in the [app:main] section below
59 #workers = 1
60 #workers = 2
60 61 ## number of threads for each of the worker, must be set to 1 for gevent
61 62 ## generally recommended to be 1
62 63 #threads = 1
63 64 ## process name
64 65 #proc_name = rhodecode
65 66 ## type of worker class, one of sync, gevent
66 67 ## for bigger setups it is recommended to use a worker class other than sync
67 68 #worker_class = sync
68 69 ## The maximum number of simultaneous clients. Valid only for Gevent
69 70 #worker_connections = 10
70 71 ## max number of requests that worker will handle before being gracefully
71 72 ## restarted, could prevent memory leaks
72 73 #max_requests = 1000
73 74 #max_requests_jitter = 30
74 ## ammount of time a worker can spend with handling a request before it
75 ## amount of time a worker can spend with handling a request before it
75 76 ## gets killed and restarted. Set to 6hrs
76 77 #timeout = 21600
77 78
78 79
79 80 ## prefix middleware for RhodeCode, disables force_https flag.
80 81 ## allows to set RhodeCode under a prefix in server.
81 82 ## eg https://server.com/<prefix>. Enable `filter-with =` option below as well.
82 83 #[filter:proxy-prefix]
83 84 #use = egg:PasteDeploy#prefix
84 85 #prefix = /<your-prefix>
85 86
86 87 [app:main]
87 88 use = egg:rhodecode-enterprise-ce
88 89 ## enable proxy prefix middleware, defined below
89 90 #filter-with = proxy-prefix
90 91
91 92 # During development the we want to have the debug toolbar enabled
92 93 pyramid.includes =
93 94 pyramid_debugtoolbar
94 95 rhodecode.utils.debugtoolbar
95 96 rhodecode.lib.middleware.request_wrapper
96 97
97 98 pyramid.reload_templates = true
98 99
99 100 debugtoolbar.hosts = 0.0.0.0/0
100 101 debugtoolbar.exclude_prefixes =
101 102 /css
102 103 /fonts
103 104 /images
104 105 /js
105 106
106 107 ## RHODECODE PLUGINS ##
107 108 rhodecode.includes =
108 109 rhodecode.api
109 110
110 111
111 112 # api prefix url
112 113 rhodecode.api.url = /_admin/api
113 114
114 115
115 116 ## END RHODECODE PLUGINS ##
116 117
117 118 full_stack = true
118 119
119 120 ## Serve static files via RhodeCode, disable to serve them via HTTP server
120 121 static_files = true
121 122
122 123 ## Optional Languages
123 124 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
124 125 lang = en
125 126
126 127 ## perform a full repository scan on each server start, this should be
127 128 ## set to false after first startup, to allow faster server restarts.
128 129 startup.import_repos = false
129 130
130 131 ## Uncomment and set this path to use archive download cache.
131 132 ## Once enabled, generated archives will be cached at this location
132 133 ## and served from the cache during subsequent requests for the same archive of
133 134 ## the repository.
134 135 #archive_cache_dir = /tmp/tarballcache
135 136
136 137 ## change this to unique ID for security
137 138 app_instance_uuid = rc-production
138 139
139 140 ## cut off limit for large diffs (size in bytes)
140 141 cut_off_limit_diff = 1024000
141 142 cut_off_limit_file = 256000
142 143
143 144 ## use cache version of scm repo everywhere
144 145 vcs_full_cache = true
145 146
146 147 ## force https in RhodeCode, fixes https redirects, assumes it's always https
147 148 ## Normally this is controlled by proper http flags sent from http server
148 149 force_https = false
149 150
150 151 ## use Strict-Transport-Security headers
151 152 use_htsts = false
152 153
153 154 ## number of commits stats will parse on each iteration
154 155 commit_parse_limit = 25
155 156
156 157 ## git rev filter option, --all is the default filter, if you need to
157 158 ## hide all refs in changelog switch this to --branches --tags
158 159 git_rev_filter = --branches --tags
159 160
160 161 # Set to true if your repos are exposed using the dumb protocol
161 162 git_update_server_info = false
162 163
163 164 ## RSS/ATOM feed options
164 165 rss_cut_off_limit = 256000
165 166 rss_items_per_page = 10
166 167 rss_include_diff = false
167 168
168 169 ## gist URL alias, used to create nicer urls for gist. This should be an
169 170 ## url that does rewrites to _admin/gists/<gistid>.
170 171 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
171 172 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/<gistid>
172 173 gist_alias_url =
173 174
174 175 ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be
175 176 ## used for access.
176 177 ## Adding ?auth_token = <token> to the url authenticates this request as if it
151 152 ## came from the logged-in user who owns this authentication token.
178 179 ##
179 180 ## Syntax is <ControllerClass>:<function_pattern>.
180 181 ## To enable access to raw_files put `FilesController:raw`.
181 182 ## To enable access to patches add `ChangesetController:changeset_patch`.
182 183 ## The list should be "," separated and on a single line.
183 184 ##
184 185 ## Recommended controllers to enable:
185 186 # ChangesetController:changeset_patch,
186 187 # ChangesetController:changeset_raw,
187 188 # FilesController:raw,
188 189 # FilesController:archivefile,
189 190 # GistsController:*,
190 191 api_access_controllers_whitelist =
191 192
192 193 ## default encoding used to convert from and to unicode
193 194 ## can be also a comma separated list of encoding in case of mixed encodings
194 195 default_encoding = UTF-8
195 196
196 197 ## instance-id prefix
197 198 ## a prefix key for this instance used for cache invalidation when running
198 199 ## multiple instances of rhodecode, make sure it's globally unique for
199 200 ## all running rhodecode instances. Leave empty if you don't use it
200 201 instance_id =
201 202
203 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
204 ## of an authentication plugin even if it is disabled by its settings.
205 ## This could be useful if you are unable to log in to the system due to broken
206 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
207 ## module to log in again and fix the settings.
208 ##
209 ## Available builtin plugin IDs (hash is part of the ID):
210 ## egg:rhodecode-enterprise-ce#rhodecode
211 ## egg:rhodecode-enterprise-ce#pam
212 ## egg:rhodecode-enterprise-ce#ldap
213 ## egg:rhodecode-enterprise-ce#jasig_cas
214 ## egg:rhodecode-enterprise-ce#headers
215 ## egg:rhodecode-enterprise-ce#crowd
216 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
217
202 218 ## alternative return HTTP header for failed authentication. Default HTTP
203 219 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
204 220 ## handling that causing a series of failed authentication calls.
205 221 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
206 222 ## This will be served instead of the default 401 on bad authentication
207 223 auth_ret_code =
208 224
209 225 ## use special detection method when serving auth_ret_code, instead of serving
210 226 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
211 227 ## and then serve auth_ret_code to clients
212 228 auth_ret_code_detection = false
213 229
214 230 ## locking return code. When repository is locked return this HTTP code. 2XX
215 231 ## codes don't break the transactions while 4XX codes do
216 232 lock_ret_code = 423
217 233
218 234 ## allows to change the repository location in settings page
219 235 allow_repo_location_change = true
220 236
221 237 ## allows to setup custom hooks in settings page
222 238 allow_custom_hooks_settings = true
223 239
224 240 ## generated license token, goto license page in RhodeCode settings to obtain
225 241 ## new token
226 242 license_token =
227 243
228 244 ## supervisor connection uri, for managing supervisor and logs.
229 245 supervisor.uri =
230 246 ## supervisord group name/id we only want this RC instance to handle
231 247 supervisor.group_id = dev
232 248
233 249 ## Display extended labs settings
234 250 labs_settings_active = true
235 251
236 252 ####################################
237 253 ### CELERY CONFIG ####
238 254 ####################################
239 255 use_celery = false
240 256 broker.host = localhost
241 257 broker.vhost = rabbitmqhost
242 258 broker.port = 5672
243 259 broker.user = rabbitmq
244 260 broker.password = qweqwe
245 261
246 262 celery.imports = rhodecode.lib.celerylib.tasks
247 263
248 264 celery.result.backend = amqp
249 265 celery.result.dburi = amqp://
250 266 celery.result.serialier = json
251 267
252 268 #celery.send.task.error.emails = true
253 269 #celery.amqp.task.result.expires = 18000
254 270
255 271 celeryd.concurrency = 2
256 272 #celeryd.log.file = celeryd.log
257 273 celeryd.log.level = debug
258 274 celeryd.max.tasks.per.child = 1
259 275
260 276 ## tasks will never be sent to the queue, but executed locally instead.
261 277 celery.always.eager = false
262 278
263 279 ####################################
264 280 ### BEAKER CACHE ####
265 281 ####################################
266 282 # default cache dir for templates. Putting this into a ramdisk
267 283 ## can boost performance, eg. %(here)s/data_ramdisk
268 284 cache_dir = %(here)s/data
269 285
270 286 ## locking and default file storage for Beaker. Putting this into a ramdisk
271 287 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
272 288 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
273 289 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
274 290
275 291 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
276 292
277 293 beaker.cache.super_short_term.type = memory
278 294 beaker.cache.super_short_term.expire = 10
279 295 beaker.cache.super_short_term.key_length = 256
280 296
281 297 beaker.cache.short_term.type = memory
282 298 beaker.cache.short_term.expire = 60
283 299 beaker.cache.short_term.key_length = 256
284 300
285 301 beaker.cache.long_term.type = memory
286 302 beaker.cache.long_term.expire = 36000
287 303 beaker.cache.long_term.key_length = 256
288 304
289 305 beaker.cache.sql_cache_short.type = memory
290 306 beaker.cache.sql_cache_short.expire = 10
291 307 beaker.cache.sql_cache_short.key_length = 256
292 308
293 309 # default is memory cache, configure only if required
294 310 # using multi-node or multi-worker setup
295 311 #beaker.cache.auth_plugins.type = ext:database
296 312 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
297 313 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
298 314 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
299 315 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
300 316 #beaker.cache.auth_plugins.sa.pool_size = 10
301 317 #beaker.cache.auth_plugins.sa.max_overflow = 0
302 318
303 319 beaker.cache.repo_cache_long.type = memorylru_base
304 320 beaker.cache.repo_cache_long.max_items = 4096
305 321 beaker.cache.repo_cache_long.expire = 2592000
306 322
307 323 # default is memorylru_base cache, configure only if required
308 324 # using multi-node or multi-worker setup
309 325 #beaker.cache.repo_cache_long.type = ext:memcached
310 326 #beaker.cache.repo_cache_long.url = localhost:11211
311 327 #beaker.cache.repo_cache_long.expire = 1209600
312 328 #beaker.cache.repo_cache_long.key_length = 256
313 329
314 330 ####################################
315 331 ### BEAKER SESSION ####
316 332 ####################################
317 333
318 334 ## .session.type is type of storage options for the session, current allowed
319 ## types are file, ext:memcached, ext:database, and memory(default).
335 ## types are file, ext:memcached, ext:database, and memory (default).
320 336 beaker.session.type = file
321 337 beaker.session.data_dir = %(here)s/data/sessions/data
322 338
323 339 ## db based session, fast, and allows easy management over logged in users ##
324 340 #beaker.session.type = ext:database
325 341 #beaker.session.table_name = db_session
326 342 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
327 343 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
328 344 #beaker.session.sa.pool_recycle = 3600
329 345 #beaker.session.sa.echo = false
330 346
331 347 beaker.session.key = rhodecode
332 348 beaker.session.secret = develop-rc-uytcxaz
333 349 beaker.session.lock_dir = %(here)s/data/sessions/lock
334 350
335 351 ## Secure encrypted cookie. Requires AES and AES python libraries
336 352 ## you must disable beaker.session.secret to use this
337 353 #beaker.session.encrypt_key = <key_for_encryption>
338 354 #beaker.session.validate_key = <validation_key>
339 355
340 356 ## sets the session as invalid (also logging out the user) if it has not been
341 357 ## accessed for given amount of time in seconds
342 358 beaker.session.timeout = 2592000
343 359 beaker.session.httponly = true
344 360 #beaker.session.cookie_path = /<your-prefix>
345 361
346 362 ## uncomment for https secure cookie
347 363 beaker.session.secure = false
348 364
349 365 ## automatically save the session so there is no need to call .save()
350 366 beaker.session.auto = false
351 367
352 368 ## default cookie expiration time in seconds, set to `true` to set expire
353 369 ## at browser close
354 370 #beaker.session.cookie_expires = 3600
355 371
356 372 ###################################
357 373 ## SEARCH INDEXING CONFIGURATION ##
358 374 ###################################
375 ## Full text search indexer is available in rhodecode-tools under
376 ## `rhodecode-tools index` command
359 377
378 # WHOOSH Backend, doesn't require additional services to run
379 # it works well with a few dozen repos
360 380 search.module = rhodecode.lib.index.whoosh
361 381 search.location = %(here)s/data/index
362 382
383
363 384 ###################################
364 ## ERROR AND LOG HANDLING SYSTEM ##
385 ## APPENLIGHT CONFIG ##
365 386 ###################################
366 387
367 388 ## Appenlight is tailored to work with RhodeCode, see
368 389 ## http://appenlight.com for details how to obtain an account
369 390
370 391 ## appenlight integration enabled
371 392 appenlight = false
372 393
373 394 appenlight.server_url = https://api.appenlight.com
374 395 appenlight.api_key = YOUR_API_KEY
375 ;appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
396 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
376 397
377 398 # used for JS client
378 399 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
379 400
380 401 ## TWEAK AMOUNT OF INFO SENT HERE
381 402
382 403 ## enables 404 error logging (default False)
383 404 appenlight.report_404 = false
384 405
385 406 ## time in seconds after request is considered being slow (default 1)
386 407 appenlight.slow_request_time = 1
387 408
388 409 ## record slow requests in application
389 410 ## (needs to be enabled for slow datastore recording and time tracking)
390 411 appenlight.slow_requests = true
391 412
392 413 ## enable hooking to application loggers
393 414 appenlight.logging = true
394 415
395 416 ## minimum log level for log capture
396 417 appenlight.logging.level = WARNING
397 418
398 419 ## send logs only from erroneous/slow requests
399 420 ## (saves API quota for intensive logging)
400 421 appenlight.logging_on_error = false
401 422
402 423 ## list of additional keywords that should be grabbed from the environ object
403 424 ## can be string with comma separated list of words in lowercase
404 425 ## (by default client will always send following info:
405 426 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
406 427 ## start with HTTP*); this list can be extended with additional keywords here
407 428 appenlight.environ_keys_whitelist =
408 429
409 430 ## list of keywords that should be blanked from request object
410 431 ## can be string with comma separated list of words in lowercase
411 432 ## (by default client will always blank keys that contain following words
412 433 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
413 434 ## this list can be extended with additional keywords set here
414 435 appenlight.request_keys_blacklist =
415 436
416 437 ## list of namespaces that should be ignored when gathering log entries
417 438 ## can be string with comma separated list of namespaces
418 439 ## (by default the client ignores own entries: appenlight_client.client)
419 440 appenlight.log_namespace_blacklist =
420 441
421 442
422 443 ################################################################################
423 444 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
424 445 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
425 446 ## execute malicious code after an exception is raised. ##
426 447 ################################################################################
427 448 #set debug = false
428 449
429 450
430 451 ##############
431 452 ## STYLING ##
432 453 ##############
433 454 debug_style = true
434 455
435 456 #########################################################
436 457 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
437 458 #########################################################
438 459 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
439 460 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
440 461 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
441 462
442 463 # see sqlalchemy docs for other advanced settings
443 464
444 465 ## print the sql statements to output
445 466 sqlalchemy.db1.echo = false
446 467 ## recycle the connections after this amount of seconds
447 468 sqlalchemy.db1.pool_recycle = 3600
448 469 sqlalchemy.db1.convert_unicode = true
449 470
450 471 ## the number of connections to keep open inside the connection pool.
451 472 ## 0 indicates no limit
452 473 #sqlalchemy.db1.pool_size = 5
453 474
454 475 ## the number of connections to allow in connection pool "overflow", that is
455 476 ## connections that can be opened above and beyond the pool_size setting,
456 477 ## which defaults to five.
457 478 #sqlalchemy.db1.max_overflow = 10
458 479
459 480
460 481 ##################
461 482 ### VCS CONFIG ###
462 483 ##################
463 484 vcs.server.enable = true
464 485 vcs.server = localhost:9900
465 # Available protocols: pyro4, http
466 vcs.server.protocol = pyro4
467 486
468 # available impl:
469 # vcsserver.scm_app (EE only, for testing),
470 # rhodecode.lib.middleware.utils.scm_app_http
471 # pyro4
487 ## Web server connectivity protocol, responsible for web-based VCS operations
488 ## Available protocols are:
489 ## `pyro4` - using pyro4 server
490 ## `http` - using http-rpc backend
491 #vcs.server.protocol = http
492
493 ## Push/Pull operations protocol, available options are:
494 ## `pyro4` - using pyro4 server
495 ## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended
496 ## `vcsserver.scm_app` - internal app (EE only)
472 497 #vcs.scm_app_implementation = rhodecode.lib.middleware.utils.scm_app_http
473 498
499 ## Push/Pull operations hooks protocol, available options are:
500 ## `pyro4` - using pyro4 server
501 ## `http` - using http-rpc backend
502 #vcs.hooks.protocol = http
503
474 504 vcs.server.log_level = debug
505 ## Start VCSServer with this instance as a subprocess, useful for development
475 506 vcs.start_server = true
476 507 vcs.backends = hg, git, svn
477 508 vcs.connection_timeout = 3600
478 509 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
479 510 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
480 511 #vcs.svn.compatible_version = pre-1.8-compatible
481 512
482 513 ################################
483 514 ### LOGGING CONFIGURATION ####
484 515 ################################
485 516 [loggers]
486 517 keys = root, routes, rhodecode, sqlalchemy, beaker, pyro4, templates, whoosh_indexer
487 518
488 519 [handlers]
489 520 keys = console, console_sql
490 521
491 522 [formatters]
492 523 keys = generic, color_formatter, color_formatter_sql
493 524
494 525 #############
495 526 ## LOGGERS ##
496 527 #############
497 528 [logger_root]
498 529 level = NOTSET
499 530 handlers = console
500 531
501 532 [logger_routes]
502 533 level = DEBUG
503 534 handlers =
504 535 qualname = routes.middleware
505 536 ## "level = DEBUG" logs the route matched and routing variables.
506 537 propagate = 1
507 538
508 539 [logger_beaker]
509 540 level = DEBUG
510 541 handlers =
511 542 qualname = beaker.container
512 543 propagate = 1
513 544
514 545 [logger_pyro4]
515 546 level = DEBUG
516 547 handlers =
517 548 qualname = Pyro4
518 549 propagate = 1
519 550
520 551 [logger_templates]
521 552 level = INFO
522 553 handlers =
523 554 qualname = pylons.templating
524 555 propagate = 1
525 556
526 557 [logger_rhodecode]
527 558 level = DEBUG
528 559 handlers =
529 560 qualname = rhodecode
530 561 propagate = 1
531 562
532 563 [logger_sqlalchemy]
533 564 level = INFO
534 565 handlers = console_sql
535 566 qualname = sqlalchemy.engine
536 567 propagate = 0
537 568
538 569 [logger_whoosh_indexer]
539 570 level = DEBUG
540 571 handlers =
541 572 qualname = whoosh_indexer
542 573 propagate = 1
543 574
544 575 ##############
545 576 ## HANDLERS ##
546 577 ##############
547 578
548 579 [handler_console]
549 580 class = StreamHandler
550 581 args = (sys.stderr,)
551 582 level = DEBUG
552 583 formatter = color_formatter
553 584
554 585 [handler_console_sql]
555 586 class = StreamHandler
556 587 args = (sys.stderr,)
557 588 level = DEBUG
558 589 formatter = color_formatter_sql
559 590
560 591 ################
561 592 ## FORMATTERS ##
562 593 ################
563 594
564 595 [formatter_generic]
565 596 class = rhodecode.lib.logging_formatter.Pyro4AwareFormatter
566 597 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
567 598 datefmt = %Y-%m-%d %H:%M:%S
568 599
569 600 [formatter_color_formatter]
570 601 class = rhodecode.lib.logging_formatter.ColorFormatter
571 602 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
572 603 datefmt = %Y-%m-%d %H:%M:%S
573 604
574 605 [formatter_color_formatter_sql]
575 606 class = rhodecode.lib.logging_formatter.ColorFormatterSql
576 607 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
577 608 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,551 +1,577 b''
1 1 ################################################################################
2 2 ################################################################################
3 3 # RhodeCode Enterprise - configuration file #
4 4 # Built-in functions and variables #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 # #
7 7 ################################################################################
8 8
9 9 [DEFAULT]
10 10 debug = true
11 11 pdebug = false
12 12 ################################################################################
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17 #email_to = admin@localhost
18 18 #error_email_from = paste_error@localhost
19 19 #app_email_from = rhodecode-noreply@localhost
20 20 #error_message =
21 21 #email_prefix = [RhodeCode]
22 22
23 23 #smtp_server = mail.server.com
24 24 #smtp_username =
25 25 #smtp_password =
26 26 #smtp_port =
27 27 #smtp_use_tls = false
28 28 #smtp_use_ssl = true
29 29 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
30 30 #smtp_auth =
31 31
32 32 [server:main]
33 33 ## COMMON ##
34 34 host = 127.0.0.1
35 35 port = 5000
36 36
37 ##########################
38 ## WAITRESS WSGI SERVER ##
39 ##########################
40 use = egg:waitress#main
37 ##################################
38 ## WAITRESS WSGI SERVER ##
39 ## Recommended for Development ##
40 ##################################
41 #use = egg:waitress#main
41 42 ## number of worker threads
42 threads = 5
43 #threads = 5
43 44 ## MAX BODY SIZE 100GB
44 max_request_body_size = 107374182400
45 #max_request_body_size = 107374182400
45 46 ## Use poll instead of select, fixes file descriptors limits problems.
46 47 ## May not work on old windows systems.
47 asyncore_use_poll = true
48 #asyncore_use_poll = true
48 49
49 50
50 51 ##########################
51 52 ## GUNICORN WSGI SERVER ##
52 53 ##########################
53 54 ## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini>
54 #use = egg:gunicorn#main
55 use = egg:gunicorn#main
55 56 ## Sets the number of process workers. You must set `instance_id = *`
56 57 ## when this option is set to more than one worker, recommended
57 58 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
58 59 ## The `instance_id = *` must be set in the [app:main] section below
59 #workers = 1
60 workers = 2
60 61 ## number of threads for each of the worker, must be set to 1 for gevent
61 62 ## generally recommened to be at 1
62 63 #threads = 1
63 64 ## process name
64 #proc_name = rhodecode
65 proc_name = rhodecode
65 66 ## type of worker class, one of sync, gevent
66 67 ## for bigger setups it is recommended to use a worker class other than sync
67 #worker_class = sync
68 worker_class = sync
68 69 ## The maximum number of simultaneous clients. Valid only for Gevent
69 70 #worker_connections = 10
70 71 ## max number of requests that worker will handle before being gracefully
71 72 ## restarted, could prevent memory leaks
72 #max_requests = 1000
73 #max_requests_jitter = 30
74 ## ammount of time a worker can spend with handling a request before it
73 max_requests = 1000
74 max_requests_jitter = 30
75 ## amount of time a worker can spend with handling a request before it
75 76 ## gets killed and restarted. Set to 6hrs
76 #timeout = 21600
77 timeout = 21600
77 78
78 79
79 80 ## prefix middleware for RhodeCode, disables force_https flag.
80 81 ## allows to set RhodeCode under a prefix in server.
81 82 ## eg https://server.com/<prefix>. Enable `filter-with =` option below as well.
82 83 #[filter:proxy-prefix]
83 84 #use = egg:PasteDeploy#prefix
84 85 #prefix = /<your-prefix>
85 86
86 87 [app:main]
87 88 use = egg:rhodecode-enterprise-ce
88 89 ## enable proxy prefix middleware, defined below
89 90 #filter-with = proxy-prefix
90 91
91 92 full_stack = true
92 93
93 94 ## Serve static files via RhodeCode, disable to serve them via HTTP server
94 95 static_files = true
95 96
96 97 ## Optional Languages
97 98 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
98 99 lang = en
99 100
100 101 ## perform a full repository scan on each server start, this should be
101 102 ## set to false after first startup, to allow faster server restarts.
102 103 startup.import_repos = false
103 104
104 105 ## Uncomment and set this path to use archive download cache.
105 106 ## Once enabled, generated archives will be cached at this location
106 107 ## and served from the cache during subsequent requests for the same archive of
107 108 ## the repository.
108 109 #archive_cache_dir = /tmp/tarballcache
109 110
110 111 ## change this to unique ID for security
111 112 app_instance_uuid = rc-production
112 113
113 114 ## cut off limit for large diffs (size in bytes)
114 115 cut_off_limit_diff = 1024000
115 116 cut_off_limit_file = 256000
116 117
117 118 ## use cache version of scm repo everywhere
118 119 vcs_full_cache = true
119 120
120 121 ## force https in RhodeCode, fixes https redirects, assumes it's always https
121 122 ## Normally this is controlled by proper http flags sent from http server
122 123 force_https = false
123 124
124 125 ## use Strict-Transport-Security headers
125 126 use_htsts = false
126 127
127 128 ## number of commits stats will parse on each iteration
128 129 commit_parse_limit = 25
129 130
130 131 ## git rev filter option, --all is the default filter, if you need to
131 132 ## hide all refs in changelog switch this to --branches --tags
132 133 git_rev_filter = --branches --tags
133 134
134 135 # Set to true if your repos are exposed using the dumb protocol
135 136 git_update_server_info = false
136 137
137 138 ## RSS/ATOM feed options
138 139 rss_cut_off_limit = 256000
139 140 rss_items_per_page = 10
140 141 rss_include_diff = false
141 142
142 143 ## gist URL alias, used to create nicer urls for gist. This should be an
143 144 ## url that does rewrites to _admin/gists/<gistid>.
144 145 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
145 146 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/<gistid>
146 147 gist_alias_url =
147 148
148 149 ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be
149 150 ## used for access.
150 151 ## Adding ?auth_token = <token> to the url authenticates this request as if it
151 152 ## came from the logged-in user who owns this authentication token.
152 153 ##
153 154 ## Syntax is <ControllerClass>:<function_pattern>.
154 155 ## To enable access to raw_files put `FilesController:raw`.
155 156 ## To enable access to patches add `ChangesetController:changeset_patch`.
156 157 ## The list should be "," separated and on a single line.
157 158 ##
158 159 ## Recommended controllers to enable:
159 160 # ChangesetController:changeset_patch,
160 161 # ChangesetController:changeset_raw,
161 162 # FilesController:raw,
162 163 # FilesController:archivefile,
163 164 # GistsController:*,
164 165 api_access_controllers_whitelist =
165 166
166 167 ## default encoding used to convert from and to unicode
167 168 ## can also be a comma separated list of encodings in case of mixed encodings
168 169 default_encoding = UTF-8
169 170
170 171 ## instance-id prefix
171 172 ## a prefix key for this instance used for cache invalidation when running
172 173 ## multiple instances of rhodecode, make sure it's globally unique for
173 174 ## all running rhodecode instances. Leave empty if you don't use it
174 175 instance_id =
175 176
177 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
178 ## of an authentication plugin even if it is disabled by its settings.
179 ## This could be useful if you are unable to log in to the system due to broken
180 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
181 ## module to log in again and fix the settings.
182 ##
183 ## Available builtin plugin IDs (hash is part of the ID):
184 ## egg:rhodecode-enterprise-ce#rhodecode
185 ## egg:rhodecode-enterprise-ce#pam
186 ## egg:rhodecode-enterprise-ce#ldap
187 ## egg:rhodecode-enterprise-ce#jasig_cas
188 ## egg:rhodecode-enterprise-ce#headers
189 ## egg:rhodecode-enterprise-ce#crowd
190 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
191
176 192 ## alternative return HTTP header for failed authentication. Default HTTP
177 193 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
178 194 ## handling that causing a series of failed authentication calls.
179 195 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
180 196 ## This will be served instead of the default 401 on bad authentication
181 197 auth_ret_code =
182 198
183 199 ## use special detection method when serving auth_ret_code, instead of serving
184 200 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
185 201 ## and then serve auth_ret_code to clients
186 202 auth_ret_code_detection = false
187 203
188 204 ## locking return code. When repository is locked return this HTTP code. 2XX
189 205 ## codes don't break the transactions while 4XX codes do
190 206 lock_ret_code = 423
191 207
192 208 ## allows to change the repository location in settings page
193 209 allow_repo_location_change = true
194 210
195 211 ## allows to setup custom hooks in settings page
196 212 allow_custom_hooks_settings = true
197 213
198 214 ## generated license token, goto license page in RhodeCode settings to obtain
199 215 ## new token
200 216 license_token =
201 217
202 218 ## supervisor connection uri, for managing supervisor and logs.
203 219 supervisor.uri =
204 220 ## supervisord group name/id that this RC instance should handle
205 221 supervisor.group_id = prod
206 222
207 223 ## Display extended labs settings
208 224 labs_settings_active = true
209 225
210 226 ####################################
211 227 ### CELERY CONFIG ####
212 228 ####################################
213 229 use_celery = false
214 230 broker.host = localhost
215 231 broker.vhost = rabbitmqhost
216 232 broker.port = 5672
217 233 broker.user = rabbitmq
218 234 broker.password = qweqwe
219 235
220 236 celery.imports = rhodecode.lib.celerylib.tasks
221 237
222 238 celery.result.backend = amqp
223 239 celery.result.dburi = amqp://
224 240 celery.result.serialier = json
225 241
226 242 #celery.send.task.error.emails = true
227 243 #celery.amqp.task.result.expires = 18000
228 244
229 245 celeryd.concurrency = 2
230 246 #celeryd.log.file = celeryd.log
231 247 celeryd.log.level = debug
232 248 celeryd.max.tasks.per.child = 1
233 249
234 250 ## tasks will never be sent to the queue, but executed locally instead.
235 251 celery.always.eager = false
236 252
237 253 ####################################
238 254 ### BEAKER CACHE ####
239 255 ####################################
240 256 ## default cache dir for templates. Putting this into a ramdisk
241 257 ## can boost performance, e.g. %(here)s/data_ramdisk
242 258 cache_dir = %(here)s/data
243 259
244 260 ## locking and default file storage for Beaker. Putting this into a ramdisk
245 261 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
246 262 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
247 263 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
248 264
249 265 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
250 266
251 267 beaker.cache.super_short_term.type = memory
252 268 beaker.cache.super_short_term.expire = 10
253 269 beaker.cache.super_short_term.key_length = 256
254 270
255 271 beaker.cache.short_term.type = memory
256 272 beaker.cache.short_term.expire = 60
257 273 beaker.cache.short_term.key_length = 256
258 274
259 275 beaker.cache.long_term.type = memory
260 276 beaker.cache.long_term.expire = 36000
261 277 beaker.cache.long_term.key_length = 256
262 278
263 279 beaker.cache.sql_cache_short.type = memory
264 280 beaker.cache.sql_cache_short.expire = 10
265 281 beaker.cache.sql_cache_short.key_length = 256
266 282
267 283 # default is memory cache, configure only if required
268 284 # using multi-node or multi-worker setup
269 285 #beaker.cache.auth_plugins.type = ext:database
270 286 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
271 287 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
272 288 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
273 289 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
274 290 #beaker.cache.auth_plugins.sa.pool_size = 10
275 291 #beaker.cache.auth_plugins.sa.max_overflow = 0
276 292
277 293 beaker.cache.repo_cache_long.type = memorylru_base
278 294 beaker.cache.repo_cache_long.max_items = 4096
279 295 beaker.cache.repo_cache_long.expire = 2592000
280 296
281 297 # default is memorylru_base cache, configure only if required
282 298 # using multi-node or multi-worker setup
283 299 #beaker.cache.repo_cache_long.type = ext:memcached
284 300 #beaker.cache.repo_cache_long.url = localhost:11211
285 301 #beaker.cache.repo_cache_long.expire = 1209600
286 302 #beaker.cache.repo_cache_long.key_length = 256
287 303
288 304 ####################################
289 305 ### BEAKER SESSION ####
290 306 ####################################
291 307
292 308 ## .session.type is type of storage options for the session, current allowed
293 ## types are file, ext:memcached, ext:database, and memory(default).
309 ## types are file, ext:memcached, ext:database, and memory (default).
294 310 beaker.session.type = file
295 311 beaker.session.data_dir = %(here)s/data/sessions/data
296 312
297 313 ## db based session, fast, and allows easy management over logged in users ##
298 314 #beaker.session.type = ext:database
299 315 #beaker.session.table_name = db_session
300 316 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
301 317 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
302 318 #beaker.session.sa.pool_recycle = 3600
303 319 #beaker.session.sa.echo = false
304 320
305 321 beaker.session.key = rhodecode
306 322 beaker.session.secret = production-rc-uytcxaz
307 #beaker.session.lock_dir = %(here)s/data/sessions/lock
323 beaker.session.lock_dir = %(here)s/data/sessions/lock
308 324
309 325 ## Secure encrypted cookie. Requires AES and AES python libraries
310 326 ## you must disable beaker.session.secret to use this
311 327 #beaker.session.encrypt_key = <key_for_encryption>
312 328 #beaker.session.validate_key = <validation_key>
313 329
314 330 ## sets session as invalid (also logging out the user) if it has not been
315 331 ## accessed for the given amount of time in seconds
316 332 beaker.session.timeout = 2592000
317 333 beaker.session.httponly = true
318 334 #beaker.session.cookie_path = /<your-prefix>
319 335
320 336 ## uncomment for https secure cookie
321 337 beaker.session.secure = false
322 338
323 339 ## auto save the session so there is no need to call .save()
324 340 beaker.session.auto = false
325 341
326 342 ## default cookie expiration time in seconds, set to `true` to expire
327 343 ## at browser close
328 344 #beaker.session.cookie_expires = 3600
329 345
330 346 ###################################
331 347 ## SEARCH INDEXING CONFIGURATION ##
332 348 ###################################
349 ## Full text search indexer is available in rhodecode-tools under
350 ## `rhodecode-tools index` command
333 351
352 # WHOOSH Backend, doesn't require additional services to run
353 # it works well with a few dozen repos
334 354 search.module = rhodecode.lib.index.whoosh
335 355 search.location = %(here)s/data/index
336 356
357
337 358 ###################################
338 ## ERROR AND LOG HANDLING SYSTEM ##
359 ## APPENLIGHT CONFIG ##
339 360 ###################################
340 361
341 362 ## Appenlight is tailored to work with RhodeCode, see
342 363 ## http://appenlight.com for details how to obtain an account
343 364
344 365 ## appenlight integration enabled
345 366 appenlight = false
346 367
347 368 appenlight.server_url = https://api.appenlight.com
348 369 appenlight.api_key = YOUR_API_KEY
349 ;appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
370 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
350 371
351 372 # used for JS client
352 373 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
353 374
354 375 ## TWEAK AMOUNT OF INFO SENT HERE
355 376
356 377 ## enables 404 error logging (default False)
357 378 appenlight.report_404 = false
358 379
359 380 ## time in seconds after request is considered being slow (default 1)
360 381 appenlight.slow_request_time = 1
361 382
362 383 ## record slow requests in application
363 384 ## (needs to be enabled for slow datastore recording and time tracking)
364 385 appenlight.slow_requests = true
365 386
366 387 ## enable hooking to application loggers
367 388 appenlight.logging = true
368 389
369 390 ## minimum log level for log capture
370 391 appenlight.logging.level = WARNING
371 392
372 393 ## send logs only from erroneous/slow requests
373 394 ## (saves API quota for intensive logging)
374 395 appenlight.logging_on_error = false
375 396
376 397 ## list of additional keywords that should be grabbed from the environ object
377 398 ## can be a string with a comma separated list of words in lowercase
378 399 ## (by default the client will always send the following info:
379 400 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
380 401 ## start with HTTP*); this list can be extended with additional keywords here
381 402 appenlight.environ_keys_whitelist =
382 403
383 404 ## list of keywords that should be blanked from the request object
384 405 ## can be a string with a comma separated list of words in lowercase
385 406 ## (by default the client will always blank keys that contain the following words:
386 407 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf');
387 408 ## this list can be extended with additional keywords set here
388 409 appenlight.request_keys_blacklist =
389 410
390 411 ## list of namespaces that should be ignored when gathering log entries
391 412 ## can be a string with a comma separated list of namespaces
392 413 ## (by default the client ignores its own entries: appenlight_client.client)
393 414 appenlight.log_namespace_blacklist =
394 415
395 416
396 417 ################################################################################
397 418 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
398 419 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
399 420 ## execute malicious code after an exception is raised. ##
400 421 ################################################################################
401 422 set debug = false
402 423
403 424
404 ##############
405 ## STYLING ##
406 ##############
407 debug_style = false
408
409 425 #########################################################
410 426 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
411 427 #########################################################
412 428 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
413 429 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
414 430 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
415 431
416 432 # see sqlalchemy docs for other advanced settings
417 433
418 434 ## print the sql statements to output
419 435 sqlalchemy.db1.echo = false
420 436 ## recycle the connections after this amount of seconds
421 437 sqlalchemy.db1.pool_recycle = 3600
422 438 sqlalchemy.db1.convert_unicode = true
423 439
424 440 ## the number of connections to keep open inside the connection pool.
425 441 ## 0 indicates no limit
426 442 #sqlalchemy.db1.pool_size = 5
427 443
428 444 ## the number of connections to allow in connection pool "overflow", that is
429 445 ## connections that can be opened above and beyond the pool_size setting,
430 446 ## which defaults to five.
431 447 #sqlalchemy.db1.max_overflow = 10
432 448
433 449
434 450 ##################
435 451 ### VCS CONFIG ###
436 452 ##################
437 453 vcs.server.enable = true
438 454 vcs.server = localhost:9900
439 # Available protocols: pyro4, http
440 vcs.server.protocol = pyro4
441 455
442 # available impl:
443 # vcsserver.scm_app (EE only, for testing),
444 # rhodecode.lib.middleware.utils.scm_app_http
445 # pyro4
456 ## Web server connectivity protocol, responsible for web-based VCS operations
457 ## Available protocols are:
458 ## `pyro4` - using pyro4 server
459 ## `http` - using http-rpc backend
460 #vcs.server.protocol = http
461
462 ## Push/Pull operations protocol, available options are:
463 ## `pyro4` - using pyro4 server
464 ## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended
465 ## `vcsserver.scm_app` - internal app (EE only)
446 466 #vcs.scm_app_implementation = rhodecode.lib.middleware.utils.scm_app_http
447 467
468 ## Push/Pull operations hooks protocol, available options are:
469 ## `pyro4` - using pyro4 server
470 ## `http` - using http-rpc backend
471 #vcs.hooks.protocol = http
472
448 473 vcs.server.log_level = info
474 ## Start VCSServer with this instance as a subprocess, useful for development
449 475 vcs.start_server = false
450 476 vcs.backends = hg, git, svn
451 477 vcs.connection_timeout = 3600
452 478 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
453 479 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
454 480 #vcs.svn.compatible_version = pre-1.8-compatible
455 481
456 482 ################################
457 483 ### LOGGING CONFIGURATION ####
458 484 ################################
459 485 [loggers]
460 486 keys = root, routes, rhodecode, sqlalchemy, beaker, pyro4, templates, whoosh_indexer
461 487
462 488 [handlers]
463 489 keys = console, console_sql
464 490
465 491 [formatters]
466 492 keys = generic, color_formatter, color_formatter_sql
467 493
468 494 #############
469 495 ## LOGGERS ##
470 496 #############
471 497 [logger_root]
472 498 level = NOTSET
473 499 handlers = console
474 500
475 501 [logger_routes]
476 502 level = DEBUG
477 503 handlers =
478 504 qualname = routes.middleware
479 505 ## "level = DEBUG" logs the route matched and routing variables.
480 506 propagate = 1
481 507
482 508 [logger_beaker]
483 509 level = DEBUG
484 510 handlers =
485 511 qualname = beaker.container
486 512 propagate = 1
487 513
488 514 [logger_pyro4]
489 515 level = DEBUG
490 516 handlers =
491 517 qualname = Pyro4
492 518 propagate = 1
493 519
494 520 [logger_templates]
495 521 level = INFO
496 522 handlers =
497 523 qualname = pylons.templating
498 524 propagate = 1
499 525
500 526 [logger_rhodecode]
501 527 level = DEBUG
502 528 handlers =
503 529 qualname = rhodecode
504 530 propagate = 1
505 531
506 532 [logger_sqlalchemy]
507 533 level = INFO
508 534 handlers = console_sql
509 535 qualname = sqlalchemy.engine
510 536 propagate = 0
511 537
512 538 [logger_whoosh_indexer]
513 539 level = DEBUG
514 540 handlers =
515 541 qualname = whoosh_indexer
516 542 propagate = 1
517 543
518 544 ##############
519 545 ## HANDLERS ##
520 546 ##############
521 547
522 548 [handler_console]
523 549 class = StreamHandler
524 550 args = (sys.stderr,)
525 551 level = INFO
526 552 formatter = generic
527 553
528 554 [handler_console_sql]
529 555 class = StreamHandler
530 556 args = (sys.stderr,)
531 557 level = WARN
532 558 formatter = generic
533 559
534 560 ################
535 561 ## FORMATTERS ##
536 562 ################
537 563
538 564 [formatter_generic]
539 565 class = rhodecode.lib.logging_formatter.Pyro4AwareFormatter
540 566 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
541 567 datefmt = %Y-%m-%d %H:%M:%S
542 568
543 569 [formatter_color_formatter]
544 570 class = rhodecode.lib.logging_formatter.ColorFormatter
545 571 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
546 572 datefmt = %Y-%m-%d %H:%M:%S
547 573
548 574 [formatter_color_formatter_sql]
549 575 class = rhodecode.lib.logging_formatter.ColorFormatterSql
550 576 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
551 577 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,215 +1,219 b''
1 1 # Nix environment for the community edition
2 2 #
3 3 # This shall be as lean as possible, just producing the Enterprise
4 4 # derivation. For advanced tweaks to pimp up the development environment we use
5 5 # "shell.nix" so that it does not have to clutter this file.
6 6
7 7 { pkgs ? (import <nixpkgs> {})
8 8 , pythonPackages ? "python27Packages"
9 9 , pythonExternalOverrides ? self: super: {}
10 10 , doCheck ? true
11 11 }:
12 12
13 13 let pkgs_ = pkgs; in
14 14
15 15 let
16 16 pkgs = pkgs_.overridePackages (self: super: {
17 17 # Override subversion derivation to
18 18 # - activate python bindings
19 19 # - set version to 1.8
20 20 subversion = super.subversion18.override {
21 21 httpSupport = true;
22 22 pythonBindings = true;
23 23 python = self.python27Packages.python;
24 24 };
25 25 });
26 26
27 27 inherit (pkgs.lib) fix extends;
28 28
29 29 basePythonPackages = with builtins; if isAttrs pythonPackages
30 30 then pythonPackages
31 31 else getAttr pythonPackages pkgs;
32 32
33 33 elem = builtins.elem;
34 34 basename = path: with pkgs.lib; last (splitString "/" path);
35 35 startsWith = prefix: full: let
36 36 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
37 37 in actualPrefix == prefix;
38 38
39 39 src-filter = path: type: with pkgs.lib;
40 40 let
41 41 ext = last (splitString "." path);
42 42 in
43 43 !elem (basename path) [
44 44 ".git" ".hg" "__pycache__" ".eggs" "node_modules"
45 45 "build" "data" "tmp"] &&
46 46 !elem ext ["egg-info" "pyc"] &&
47 47 !startsWith "result" path;
48 48
49 49 rhodecode-enterprise-ce-src = builtins.filterSource src-filter ./.;
50 50
51 51 # Load the generated node packages
52 52 nodePackages = pkgs.callPackage "${pkgs.path}/pkgs/top-level/node-packages.nix" rec {
53 53 self = nodePackages;
54 54 generated = pkgs.callPackage ./pkgs/node-packages.nix { inherit self; };
55 55 };
56 56
57 57 # TODO: Should be taken automatically out of the generates packages.
58 58 # apps.nix has one solution for this, although I'd prefer to have the deps
59 59 # from package.json mapped in here.
60 60 nodeDependencies = with nodePackages; [
61 61 grunt
62 62 grunt-contrib-concat
63 63 grunt-contrib-jshint
64 64 grunt-contrib-less
65 65 grunt-contrib-watch
66 66 jshint
67 67 ];
68 68
69 69 pythonGeneratedPackages = self: basePythonPackages.override (a: {
70 70 inherit self;
71 71 })
72 72 // (scopedImport {
73 73 self = self;
74 74 super = basePythonPackages;
75 75 inherit pkgs;
76 76 inherit (pkgs) fetchurl fetchgit;
77 77 } ./pkgs/python-packages.nix);
78 78
79 79 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
80 80 inherit
81 81 basePythonPackages
82 82 pkgs;
83 83 };
84 84
85 85 pythonLocalOverrides = self: super: {
86 86 rhodecode-enterprise-ce =
87 87 let
88 version = "${builtins.readFile ./rhodecode/VERSION}";
88 version = builtins.readFile ./rhodecode/VERSION;
89 89 linkNodeModules = ''
90 90 echo "Link node packages"
91 91 # TODO: check if this adds stuff as a dependency, closure size
92 92 rm -fr node_modules
93 93 mkdir -p node_modules
94 94 ${pkgs.lib.concatMapStrings (dep: ''
95 95 ln -sfv ${dep}/lib/node_modules/${dep.pkgName} node_modules/
96 96 '') nodeDependencies}
97 97 echo "DONE: Link node packages"
98 98 '';
99 99 in super.rhodecode-enterprise-ce.override (attrs: {
100 100
101 101 inherit doCheck;
102 102 name = "rhodecode-enterprise-ce-${version}";
103 103 version = version;
104 104 src = rhodecode-enterprise-ce-src;
105 105
106 106 buildInputs =
107 107 attrs.buildInputs ++
108 108 (with self; [
109 109 pkgs.nodePackages.grunt-cli
110 110 pkgs.subversion
111 111 pytest-catchlog
112 112 rc_testdata
113 113 ]);
114 114
115 115 propagatedBuildInputs = attrs.propagatedBuildInputs ++ (with self; [
116 116 rhodecode-tools
117 117 ]);
118 118
119 119 # TODO: johbo: Make a nicer way to expose the parts. Maybe
120 120 # pkgs/default.nix?
121 121 passthru = {
122 inherit myPythonPackagesUnfix;
122 inherit
123 pythonLocalOverrides
124 myPythonPackagesUnfix;
123 125 pythonPackages = self;
124 126 };
125 127
126 128 LC_ALL = "en_US.UTF-8";
127 129 LOCALE_ARCHIVE =
128 130 if pkgs.stdenv ? glibc
129 131 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
130 132 else "";
131 133
132 134 # Somewhat snappier setup of the development environment
133 135 # TODO: move into shell.nix
134 136 # TODO: think of supporting a stable path again, so that multiple shells
135 137 # can share it.
136 138 shellHook = ''
137 139 tmp_path=$(mktemp -d)
138 140 export PATH="$tmp_path/bin:$PATH"
139 141 export PYTHONPATH="$tmp_path/${self.python.sitePackages}:$PYTHONPATH"
140 142 mkdir -p $tmp_path/${self.python.sitePackages}
141 143 python setup.py develop --prefix $tmp_path --allow-hosts ""
142 144 '' + linkNodeModules;
143 145
144 146 preCheck = ''
145 147 export PATH="$out/bin:$PATH"
146 148 '';
147 149
148 150 postCheck = ''
149 151 rm -rf $out/lib/${self.python.libPrefix}/site-packages/pytest_pylons
150 152 rm -rf $out/lib/${self.python.libPrefix}/site-packages/rhodecode/tests
151 153 '';
152 154
153 155 preBuild = linkNodeModules + ''
154 156 grunt
155 157 rm -fr node_modules
156 158 '';
157 159
158 160 postInstall = ''
159 161 # python based programs need to be wrapped
160 162 ln -s ${self.supervisor}/bin/supervisor* $out/bin/
161 163 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
162 164 ln -s ${self.PasteScript}/bin/paster $out/bin/
165 ln -s ${self.pyramid}/bin/* $out/bin/ #*/
163 166
164 167 # rhodecode-tools
165 168 # TODO: johbo: re-think this. Do the tools import anything from enterprise?
166 169 ln -s ${self.rhodecode-tools}/bin/rhodecode-* $out/bin/
167 170
168 171 # note that condition should be restricted when adding further tools
169 172 for file in $out/bin/*; do #*/
170 173 wrapProgram $file \
171 174 --prefix PYTHONPATH : $PYTHONPATH \
175 --prefix PATH : $PATH \
172 176 --set PYTHONHASHSEED random
173 177 done
174 178
175 179 mkdir $out/etc
176 180 cp configs/production.ini $out/etc
177 181
178 182 echo "Writing meta information for rccontrol to nix-support/rccontrol"
179 183 mkdir -p $out/nix-support/rccontrol
180 184 cp -v rhodecode/VERSION $out/nix-support/rccontrol/version
181 185 echo "DONE: Meta information for rccontrol written"
182 186
183 187 # TODO: johbo: Make part of ac-tests
184 188 if [ ! -f rhodecode/public/js/scripts.js ]; then
185 189 echo "Missing scripts.js"
186 190 exit 1
187 191 fi
188 192 if [ ! -f rhodecode/public/css/style.css ]; then
189 193 echo "Missing style.css"
190 194 exit 1
191 195 fi
192 196 '';
193 197
194 198 });
195 199
196 200 rc_testdata = self.buildPythonPackage rec {
197 201 name = "rc_testdata-0.7.0";
198 202 src = pkgs.fetchhg {
199 203 url = "https://code.rhodecode.com/upstream/rc_testdata";
200 204 rev = "v0.7.0";
201 205 sha256 = "0w3z0zn8lagr707v67lgys23sl6pbi4xg7pfvdbw58h3q384h6rx";
202 206 };
203 207 };
204 208
205 209 };
206 210
207 211 # Apply all overrides and fix the final package set
208 212 myPythonPackagesUnfix =
209 213 (extends pythonExternalOverrides
210 214 (extends pythonLocalOverrides
211 215 (extends pythonOverrides
212 216 pythonGeneratedPackages)));
213 217 myPythonPackages = (fix myPythonPackagesUnfix);
214 218
215 219 in myPythonPackages.rhodecode-enterprise-ce
@@ -1,33 +1,33 b''
1 1 Apache Reverse Proxy
2 2 ^^^^^^^^^^^^^^^^^^^^
3 3
4 4 Here is a sample configuration file for using Apache as a reverse proxy.
5 5
6 6 .. code-block:: apache
7 7
8 8 <VirtualHost *:80>
9 9 ServerName hg.myserver.com
10 10 ServerAlias hg.myserver.com
11 11
12 ## uncomment root directive if you want to serve static files by nginx
13 ## requires static_files = false in .ini file
14 DocumentRoot /path/to/installation/rhodecode/public
12 ## uncomment root directive if you want to serve static files by
13 ## Apache requires static_files = false in .ini file
14 #DocumentRoot /path/to/rhodecode/installation/public
15 15
16 16 <Proxy *>
17 17 Order allow,deny
18 18 Allow from all
19 19 </Proxy>
20 20
21 #important !
22 #Directive to properly generate url (clone url) for pylons
21 ## Important !
22 ## Directive to properly generate url (clone url) for pylons
23 23 ProxyPreserveHost On
24 24
25 #rhodecode instance
26 ProxyPass / http://127.0.0.1:5000/
27 ProxyPassReverse / http://127.0.0.1:5000/
25 ## RhodeCode instance running
26 ProxyPass / http://127.0.0.1:10002/
27 ProxyPassReverse / http://127.0.0.1:10002/
28 28
29 #to enable https use line below
29 ## to enable https use line below
30 30 #SetEnvIf X-Url-Scheme https HTTPS=1
31 31
32 32 </VirtualHost>
33 33
@@ -1,235 +1,272 b''
1 1 .. _indexing-ref:
2 2
3 3 Full-text Search
4 4 ----------------
5 5
6 By default |RCM| uses `Whoosh`_ to index |repos| and provide full-text search.
6 By default |RC| is configured to use `Whoosh`_ to index |repos| and
7 provide full-text search.
8
9 |RCE| also provides support for `Elasticsearch`_ as a backend for scalable
10 search. See :ref:`enable-elasticsearch` for details.
11
12 Indexing
13 ^^^^^^^^
14
7 15 To run the indexer you need to use an |authtoken| with admin rights to all
8 16 |repos|.
9 17
10 18 To index newly added content, you have the option to set the indexer up in a
11 19 number of ways, for example:
12 20
13 21 * Call the indexer via a cron job. We recommend running this nightly,
14 22 unless you need everything indexed immediately.
15 23 * Set the indexer to infinitely loop and reindex as soon as it has run its
16 24 cycle.
17 25 * Hook the indexer up with your CI server to reindex after each push.
18 26
19 27 The indexer works by indexing new commits added since the last run. If you
20 28 wish to build a brand new index from scratch each time,
21 29 use the ``force`` option in the configuration file, as sketched below.
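
As a minimal illustrative sketch (the complete example file is shown in
:ref:`advanced-indexing` below), forcing a rebuild amounts to setting:

.. code-block:: ini

    [__DEFAULT__]
    # illustrative only: rebuild the whole index on every run
    force = true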
22 30
23 31 .. important::
24 32
25 33 You need to have |RCT| installed, see :ref:`install-tools`. Since |RCE|
26 34 3.5.0 they are installed by default.
27 35
28 36 To set up indexing, use the following steps:
29 37
30 38 1. :ref:`config-rhoderc`, if running tools remotely.
31 39 2. :ref:`run-index`
32 40 3. :ref:`set-index`
33 41 4. :ref:`advanced-indexing`
34 42
35 43 .. _config-rhoderc:
36 44
37 45 Configure the ``.rhoderc`` File
38 46 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
39 47
40 48 |RCT| uses the :file:`/home/{user}/.rhoderc` file for connection details
41 49 to |RCM| instances. If this file is not automatically created,
42 50 you can configure it using the following example. You need to configure the
43 51 details for each instance you want to index.
44 52
45 53 .. code-block:: bash
46 54
47 55 # Check the instance details
48 56 # of the instance you want to index
49 57 $ rccontrol status
50 58
51 59 - NAME: enterprise-1
52 60 - STATUS: RUNNING
53 61 - TYPE: Momentum
54 62 - VERSION: 1.5.0
55 63 - URL: http://127.0.0.1:10000
56 64
57 65 To get your API Token, on the |RCM| interface go to
58 66 :menuselection:`username --> My Account --> Auth tokens`
59 67
60 68 .. code-block:: ini
61 69
62 70 # Configure .rhoderc with matching details
63 71 # This allows the indexer to connect to the instance
64 72 [instance:enterprise-1]
65 73 api_host = http://127.0.0.1:10000
66 74 api_key = <auth token goes here>
67 75 repo_dir = /home/<username>/repos
68 76
69 77 .. _run-index:
70 78
71 79 Run the Indexer
72 80 ^^^^^^^^^^^^^^^
73 81
74 82 Run the indexer using the following command, and specify the instance you
75 83 want to index:
76 84
77 85 .. code-block:: bash
78 86
79 87 # From inside a virtualenv
80 88 (venv)$ rhodecode-index --instance-name=enterprise-1
81 89
82 90 # Using default installation
83 91 $ /home/user/.rccontrol/enterprise-4/profile/bin/rhodecode-index \
84 92 --instance-name=enterprise-4
85 93
86 94 # Using a custom mapping file
87 95 $ /home/user/.rccontrol/enterprise-4/profile/bin/rhodecode-index \
88 96 --instance-name=enterprise-4 \
89 97 --mapping=/home/user/.rccontrol/enterprise-4/mapping.ini
90 98
91 99 .. note::
92 100
93 101 |RCT| require |PY| 2.7 to run.
94 102
95 103 .. _set-index:
96 104
97 105 Schedule the Indexer
98 106 ^^^^^^^^^^^^^^^^^^^^
99 107
100 108 To schedule the indexer, configure the crontab file to run the indexer inside
101 109 your |RCT| virtualenv using the following steps.
102 110
103 111 1. Open the crontab file, using ``crontab -e``.
104 112 2. Add the indexer to the crontab, and schedule it to run as regularly as you
105 113 wish.
106 114 3. Save the file.
107 115
108 116 .. code-block:: bash
109 117
110 118 $ crontab -e
111 119
112 120 # The virtualenv can be called using its full path, so for example you can
113 121 # put this example into the crontab
114 122
115 123 # Run the indexer daily at 4am using the default mapping settings
116 124 * 4 * * * /home/ubuntu/.virtualenv/rhodecode-venv/bin/rhodecode-index \
117 125 --instance-name=enterprise-1
118 126
119 127 # Run the indexer every Sunday at 3am using default mapping
120 128 * 3 * * 0 /home/ubuntu/.virtualenv/rhodecode-venv/bin/rhodecode-index \
121 129 --instance-name=enterprise-1
122 130
123 131 # Run the indexer every 15 minutes
124 132 # using a specially configured mapping file
125 133 */15 * * * * ~/.rccontrol/enterprise-4/profile/bin/rhodecode-index \
126 134 --instance-name=enterprise-4 \
127 135 --mapping=/home/user/.rccontrol/enterprise-4/mapping.ini
128 136
129 137 .. _advanced-indexing:
130 138
131 139 Advanced Indexing
132 140 ^^^^^^^^^^^^^^^^^
133 141
134 142 |RCT| indexes based on the :file:`mapping.ini` file. To configure your index,
135 143 you can specify different options in this file. The default location is:
136 144
137 145 * :file:`/home/{user}/.rccontrol/{instance-id}/mapping.ini`, using default
138 146 |RCT|.
139 147 * :file:`~/venv/lib/python2.7/site-packages/rhodecode_tools/templates/mapping.ini`,
140 148 when using ``virtualenv``.
141 149
142 150 .. note::
143 151
144 152 If you need to create the :file:`mapping.ini` file, use the |RCT|
145 153 ``rhodecode-index --create-mapping path/to/file`` API call. For details,
146 154 see the :ref:`tools-cli` section.
147 155
148 156 The indexer runs in a random order to prevent a failing |repo| from stopping
149 157 a build. To configure different indexing scenarios, set the following options
150 158 inside the :file:`mapping.ini` and specify the altered file using the
151 159 ``--mapping`` option.
152 160
153 161 * ``index_files`` : Index the specified file types.
154 162 * ``skip_files`` : Do not index the specified file types.
155 163 * ``index_files_content`` : Index the content of the specified file types.
156 164 * ``skip_files_content`` : Do not index the content of the specified files.
157 165 * ``force`` : Create a fresh index on each run.
158 166 * ``max_filesize`` : Files larger than the set size will not be indexed.
159 167 * ``commit_parse_limit`` : Set the batch size when indexing commit messages.
160 168 Set to a lower number to lessen memory load.
161 169 * ``repo_limit`` : Set the maximum number of |repos| indexed per run.
162 170 * ``[INCLUDE]`` : Set |repos| you want indexed. This takes precedence over
163 171 ``[EXCLUDE]``.
164 172 * ``[EXCLUDE]`` : Set |repos| you do not want indexed. Exclude can be used to
165 173 not index branches, forks, or log |repos|.
166 174
167 175 At the end of the file you can specify conditions for specific |repos| that
168 176 will override the default values. To configure your indexer,
169 177 use the following example :file:`mapping.ini` file.
170 178
171 179 .. code-block:: ini
172 180
173 181 [__DEFAULT__]
174 182 # default patterns for indexing files and content of files.
175 183 # Binary files are skipped by default.
176 184
177 185 # Index python and markdown files
178 186 index_files = *.py, *.md
179 187
180 188 # Do not index these file types
181 189 skip_files = *.svg, *.log, *.dump, *.txt
182 190
183 191 # Index both file types and their content
184 192 index_files_content = *.cpp, *.ini, *.py
185 193
186 194 # Index file names, but not file content
187 195 skip_files_content = *.svg,
188 196
189 197 # Force rebuilding an index from scratch. Each repository will be rebuilt
190 198 # from scratch with a global flag. Use the local flag to rebuild single repos
191 199 force = false
192 200
193 201 # Do not index files larger than 385KB
194 202 max_filesize = 385KB
195 203
196 204 # Limit commit indexing to 500 per batch
197 205 commit_parse_limit = 500
198 206
199 207 # Limit each index run to 25 repos
200 208 repo_limit = 25
201 209
202 210 # __INCLUDE__ is more important than __EXCLUDE__.
203 211
204 212 [__INCLUDE__]
205 213 # Include all repos with these names
206 214
207 215 docs/* = 1
208 216 lib/* = 1
209 217
210 218 [__EXCLUDE__]
211 219 # Do not include the following repos in the index
212 220
213 221 dev-docs/* = 1
214 222 legacy-repos/* = 1
215 223 *-dev/* = 1
216 224
217 225 # Each repo that needs special indexing is a separate section below.
218 226 # In each section set the options to override the global configuration
219 227 # parameters above.
220 228 # If special settings are not configured, the global configuration values
221 229 # above are inherited. If no special repositories are
222 230 # defined here, RhodeCode will use the API to ask for all repositories
223 231
224 232 # For this repo use different settings
225 233 [special-repo]
226 234 commit_parse_limit = 20,
227 235 skip_files = *.idea, *.xml,
228 236
229 237 # For another repo use different settings
230 238 [another-special-repo]
231 239 index_files = *,
232 240 max_filesize = 800MB
233 241 commit_parse_limit = 20000
234 242
243 .. _enable-elasticsearch:
244
245 Enabling Elasticsearch
246 ^^^^^^^^^^^^^^^^^^^^^^
247
248 1. Open the :file:`rhodecode.ini` file for the instance you wish to edit. The
249 default location is
250 :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
251 2. Find the search configuration section:
252
253 .. code-block:: ini
254
255 ###################################
256 ## SEARCH INDEXING CONFIGURATION ##
257 ###################################
258
259 search.module = rhodecode.lib.index.whoosh
260 search.location = %(here)s/data/index
261
262 and change it to:
263
264 .. code-block:: ini
265
266 search.module = rc_elasticsearch
267 search.location = http://localhost:9200/
268
269 where ``search.location`` points to the elasticsearch server.
270
235 271 .. _Whoosh: https://pypi.python.org/pypi/Whoosh/
272 .. _Elasticsearch: https://www.elastic.co/ No newline at end of file
@@ -1,71 +1,72 b''
1 1 Nginx Configuration Example
2 2 ---------------------------
3 3
4 4 Use the following example to configure Nginx as your web server.
5 5
6 6 .. code-block:: nginx
7 7
8 8 upstream rc {
9 9
10 server 127.0.0.1:5000;
10 server 127.0.0.1:10002;
11 11
12 12 # add more instances for load balancing
13 # server 127.0.0.1:5001;
14 # server 127.0.0.1:5002;
13 # server 127.0.0.1:10003;
14 # server 127.0.0.1:10004;
15 15 }
16 16
17 17 ## gist alias
18 18
19 19 server {
20 20 listen 443;
21 21 server_name gist.myserver.com;
22 22 access_log /var/log/nginx/gist.access.log;
23 23 error_log /var/log/nginx/gist.error.log;
24 24
25 25 ssl on;
26 26 ssl_certificate gist.rhodecode.myserver.com.crt;
27 27 ssl_certificate_key gist.rhodecode.myserver.com.key;
28 28
29 29 ssl_session_timeout 5m;
30 30
31 31 ssl_protocols SSLv3 TLSv1;
32 32 ssl_ciphers DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:EDH-RSA-DES-CBC3-SHA:AES256-SHA:DES-CBC3-SHA:AES128-SHA:RC4-SHA:RC4-MD5;
33 33 ssl_prefer_server_ciphers on;
34 34 add_header Strict-Transport-Security "max-age=31536000; includeSubdomains;";
35 35
36 36 # Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits
37 37 ssl_dhparam /etc/nginx/ssl/dhparam.pem;
38 38
39 39 rewrite ^/(.+)$ https://rhodecode.myserver.com/_admin/gists/$1;
40 40 rewrite (.*) https://rhodecode.myserver.com/_admin/gists;
41 41 }
42 42
43 43 server {
44 44 listen 443;
45 45 server_name rhodecode.myserver.com;
46 46 access_log /var/log/nginx/rhodecode.access.log;
47 47 error_log /var/log/nginx/rhodecode.error.log;
48 48
49 49 ssl on;
50 50 ssl_certificate rhodecode.myserver.com.crt;
51 51 ssl_certificate_key rhodecode.myserver.com.key;
52 52
53 53 ssl_session_timeout 5m;
54 54
55 55 ssl_protocols SSLv3 TLSv1;
56 56 ssl_ciphers DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:EDH-RSA-DES-CBC3-SHA:AES256-SHA:DES-CBC3-SHA:AES128-SHA:RC4-SHA:RC4-MD5;
57 57 ssl_prefer_server_ciphers on;
58 58
59 59 ## uncomment root directive if you want to serve static files by nginx
60 60 ## requires static_files = false in .ini file
61 # root /path/to/installation/rhodecode/public;
61 # root /path/to/rhodecode/installation/public;
62 62
63 63 include /etc/nginx/proxy.conf;
64 location / {
65 try_files $uri @rhode;
66 }
64
65 location / {
66 try_files $uri @rhode;
67 }
67 68
68 69 location @rhode {
69 proxy_pass http://rc;
70 }
70 proxy_pass http://rc;
71 }
71 72 }
@@ -1,163 +1,171 b''
1 1 .. _system-overview-ref:
2 2
3 3 System Overview
4 4 ===============
5 5
6 6 Latest Version
7 7 --------------
8 8
9 9 * |release| on Unix and Windows systems.
10 10
11 11 System Architecture
12 12 -------------------
13 13
14 14 The following diagram shows a typical production architecture.
15 15
16 16 .. image:: ../images/architecture-diagram.png
17 17 :align: center
18 18
19 19 Supported Operating Systems
20 20 ---------------------------
21 21
22 22 Linux
23 23 ^^^^^
24 24
25 25 * Ubuntu 14.04
26 26 * CentOS 6.2 and 7
27 27 * Debian 7.8
28 28 * RedHat Fedora
29 29 * Arch Linux
30 30 * SUSE Linux
31 31
32 32 Windows
33 33 ^^^^^^^
34 34
35 35 * Windows Vista Ultimate 64bit
36 36 * Windows 7 Ultimate 64bit
37 37 * Windows 8 Professional 64bit
38 38 * Windows 8.1 Enterprise 64bit
39 39 * Windows Server 2008 64bit
40 40 * Windows Server 2008-R2 64bit
41 41 * Windows Server 2012 64bit
42 42
43 43 Supported Databases
44 44 -------------------
45 45
46 46 * SQLite
47 47 * MySQL
48 48 * MariaDB
49 49 * PostgreSQL
50 50
51 51 Supported Browsers
52 52 ------------------
53 53
54 54 * Chrome
55 55 * Safari
56 56 * Firefox
57 57 * Internet Explorer 10 & 11
58 58
59 59 System Requirements
60 60 -------------------
61 61
62 62 |RCM| performs best on machines with ultra-fast hard disks. Generally disk
63 63 performance is more important than CPU performance. In a corporate production
64 64 environment handling 1000s of users and |repos| you should deploy on a 12+
65 65 core 64GB RAM server. In short, the more RAM the better.
66 66
67
68 For example:
69
70 - for a team of 1 - 5 active users you can run on a 1GB RAM machine with 1 CPU
71 - above 250 active users, |RCM| needs at least 8GB of memory.
72 The number of CPUs is less important, but it is recommended to have at least 2-3 CPUs
73
74
67 75 .. _config-rce-files:
68 76
69 77 Configuration Files
70 78 -------------------
71 79
72 80 * :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
73 81 * :file:`/home/{user}/.rccontrol/{instance-id}/mapping.ini`
74 82 * :file:`/home/{user}/.rccontrol/{vcsserver-id}/vcsserver.ini`
75 83 * :file:`/home/{user}/.rccontrol/supervisor/supervisord.ini`
76 84 * :file:`/home/{user}/.rccontrol.ini`
77 85 * :file:`/home/{user}/.rhoderc`
78 86 * :file:`/home/{user}/.rccontrol/cache/MANIFEST`
79 87
80 88 For more information, see the :ref:`config-files` section.
81 89
82 90 Log Files
83 91 ---------
84 92
85 93 * :file:`/home/{user}/.rccontrol/{instance-id}/enterprise.log`
86 94 * :file:`/home/{user}/.rccontrol/{vcsserver-id}/vcsserver.log`
87 95 * :file:`/home/{user}/.rccontrol/supervisor/supervisord.log`
88 96 * :file:`/tmp/rccontrol.log`
89 97 * :file:`/tmp/rhodecode_tools.log`
90 98
91 99 Storage Files
92 100 -------------
93 101
94 102 * :file:`/home/{user}/.rccontrol/{instance-id}/data/index/{index-file.toc}`
95 103 * :file:`/home/{user}/repos/.rc_gist_store`
96 104 * :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.db`
97 105 * :file:`/opt/rhodecode/store/{unique-hash}`
98 106
99 107 Default Repositories Location
100 108 -----------------------------
101 109
102 110 * :file:`/home/{user}/repos`
103 111
104 112 Connection Methods
105 113 ------------------
106 114
107 115 * HTTPS
108 116 * SSH
109 117 * |RCM| API
110 118
111 119 Internationalization Support
112 120 ----------------------------
113 121
114 122 Currently available in the following languages, see `Transifex`_ for the
115 123 latest details. If you want a new language added, please contact us. To
116 124 configure your language settings, see the :ref:`set-lang` section.
117 125
118 126 .. hlist::
119 127
120 128 * Belorussian
121 129 * Chinese
122 130 * French
123 131 * German
124 132 * Italian
125 133 * Japanese
126 134 * Portuguese
127 135 * Polish
128 136 * Russian
129 137 * Spanish
130 138
131 139 Licencing Information
132 140 ---------------------
133 141
134 142 * See licencing information `here`_
135 143
136 144 Peer-to-peer Failover Support
137 145 -----------------------------
138 146
139 147 * Yes
140 148
141 149 Additional Binaries
142 150 -------------------
143 151
144 152 * Yes, see :ref:`rhodecode-nix-ref` for full details.
145 153
146 154 Remote Connectivity
147 155 -------------------
148 156
149 157 * Available
150 158
151 159 Executable Files
152 160 ----------------
153 161
154 162 Windows: :file:`RhodeCode-installer-{version}.exe`
155 163
156 164 Deprecated Support
157 165 ------------------
158 166
159 167 - Internet Explorer 8 support deprecated since version 3.7.0.
160 168 - Internet Explorer 9 support deprecated since version 3.8.0.
161 169
162 170 .. _here: https://rhodecode.com/licenses/
163 171 .. _Transifex: https://www.transifex.com/projects/p/RhodeCode/
@@ -1,32 +1,34 b''
1 1 # Try and keep this list alphabetical
2 2 # ui is for user interface elements and messages
3 3 # button - that's obvious
4 4
5 5 rst_epilog = '''
6 6 .. |AE| replace:: Appenlight
7 7 .. |authtoken| replace:: Authentication Token
8 8 .. |authtokens| replace:: **Auth Tokens**
9 9 .. |git| replace:: Git
10 10 .. |hg| replace:: Mercurial
11 11 .. |svn| replace:: Subversion
12 12 .. |LDAP| replace:: LDAP / Active Directory
13 13 .. |os| replace:: operating system
14 14 .. |OS| replace:: Operating System
15 15 .. |PY| replace:: Python
16 16 .. |pr| replace:: pull request
17 17 .. |prs| replace:: pull requests
18 18 .. |psf| replace:: Python Software Foundation
19 19 .. |repo| replace:: repository
20 20 .. |repos| replace:: repositories
21 21 .. |RCI| replace:: RhodeCode Control
22 22 .. |RCC| replace:: RhodeCode Control
23 23 .. |RCV| replace:: RhodeCode Enterprise
24 24 .. |RCM| replace:: RhodeCode Enterprise
25 25 .. |RCE| replace:: RhodeCode Enterprise
26 .. |RCCE| replace:: RhodeCode Community
27 .. |RCEE| replace:: RhodeCode Enterprise
26 28 .. |RCX| replace:: RhodeCode Extensions
27 29 .. |RCT| replace:: RhodeCode Tools
28 30 .. |RCEBOLD| replace:: **RhodeCode Enterprise**
29 31 .. |RCEITALICS| replace:: `RhodeCode Enterprise`
30 32 .. |RC| replace:: RhodeCode
31 33 .. |RNS| replace:: Release Notes
32 34 '''
@@ -1,38 +1,38 b''
1 1 .. _config-database:
2 2
3 3 Make Database Changes
4 4 ---------------------
5 5
6 6 .. important::
7 7
8 If you do change the |repo| database that |RCM| uses, then you will need to
8 If you do change the |repo| database that |RCEE| uses, then you will need to
9 9 upgrade the database, and also remap and rescan the |repos|. More detailed
10 10 information is available in the
11 11 :ref:`Alternative upgrade documentation <control:install-port>`.
12 12
13 If you need to change database connection details for a |RCM| instance,
13 If you need to change database connection details for a |RCEE| instance,
14 14 use the following steps:
15 15
16 16 1. Open the :file:`rhodecode.ini` file for the instance you wish to edit. The
17 17 default location is
18 18 :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
19 19 2. When you open the file, find the database configuration section,
20 20 and use the below example to change the
21 21 connection details:
22 22
23 23 .. code-block:: ini
24 24
25 25 #########################################################
26 26 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
27 27 #########################################################
28 28
29 29 # Default SQLite config
30 30 sqlalchemy.db1.url = sqlite:////home/brian/.rccontrol/enterprise-1/rhodecode.db
31 31
32 32 # Use this example for PostgreSQL
33 33 sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
34 34
35 35 # see sqlalchemy docs for other advanced settings
36 36 sqlalchemy.db1.echo = false
37 37 sqlalchemy.db1.pool_recycle = 3600
38 38 sqlalchemy.db1.convert_unicode = true
@@ -1,102 +1,109 b''
1 1 .. _quick-start:
2 2
3 3 Quick Start Guide
4 4 =================
5 5
6 6 .. important::
7 7
8 8 These are quick start instructions. To optimize your |RCE|,
9 9 |RCC|, and |RCT| usage, read the more detailed instructions in our guides.
10 10 For detailed installation instructions, see
11 11 :ref:`RhodeCode Control Documentation <control:rcc>`
12 12
13 13 .. tip::
14 14
15 15 If using a non-SQLite database, install and configure the database, create
16 16 a new user, and grant permissions. You will be prompted for this user's
17 17 credentials during |RCE| installation. See the relevant database
18 18 documentation for more details.
19 19
20 To get |RCM| up and running, run through the below steps:
20 To get |RCE| up and running, run through the below steps:
21 21
22 22 1. Download the latest |RCC| installer from your `rhodecode.com`_ profile
23 page. If you don't have an account, sign up at `rhodecode.com/register`_.
23 or main page.
24 If you don't have an account, sign up at `rhodecode.com/register`_.
25
24 26 2. Run the |RCC| installer and accept the End User Licence using the
25 27 following example:
26 28
27 29 .. code-block:: bash
28 30
29 31 $ chmod 755 RhodeCode-installer-linux-*
30 32 $ ./RhodeCode-installer-linux-*
31 33
32 34 3. Install a VCS Server, and configure it to start at boot.
33 35
34 36 .. code-block:: bash
35 37
36 38 $ rccontrol install VCSServer
37 39
38 40 Agree to the licence agreement? [y/N]: y
39 41 IP to start the server on [127.0.0.1]:
40 42 Port for the server to start [10005]:
41 43 Creating new instance: vcsserver-1
42 44 Installing RhodeCode VCSServer
43 45 Configuring RhodeCode VCS Server ...
44 46 Supervisord state is: RUNNING
45 47 Added process group vcsserver-1
46 48
47 49
48 4. Install |RCE|. If using MySQL or PostgreSQL, during installation you'll be
49 asked for your database credentials, so have them at hand. You don't need
50 any for SQLite.
50 4. Install |RCEE| or |RCCE|. If using MySQL or PostgreSQL, during
51 installation you'll be asked for your database credentials, so have them at hand.
52 MySQL or PostgreSQL needs to be running and a new database needs to be created.
53 You don't need any credentials or to create a database for SQLite.
51 54
52 55 .. code-block:: bash
53 56 :emphasize-lines: 11-16
54 57
58 $ rccontrol install Community
59
60 or
61
55 62 $ rccontrol install Enterprise
56 63
57 64 Username [admin]: username
58 65 Password (min 6 chars):
59 66 Repeat for confirmation:
60 67 Email: your@mail.com
61 68 Respositories location [/home/brian/repos]:
62 69 IP to start the Enterprise server on [127.0.0.1]:
63 70 Port for the Enterprise server to use [10004]:
64 71 Database type - [s]qlite, [m]ysql, [p]ostresql:
65 72 PostgreSQL selected
66 73 Database host [127.0.0.1]:
67 74 Database port [5432]:
68 75 Database username: db-user-name
69 76 Database password: somepassword
70 77 Database name: example-db-name
71 78
72 5. Check the status of your installation. You |RCE| instance runs on the URL
73 displayed in the status message.
79 5. Check the status of your installation. Your |RCEE|/|RCCE| instance runs
80 on the URL displayed in the status message.
74 81
75 82 .. code-block:: bash
76 83
77 84 $ rccontrol status
78 85
79 86 - NAME: enterprise-1
80 87 - STATUS: RUNNING
81 88 - TYPE: Enterprise
82 - VERSION: 3.3.0
89 - VERSION: 4.1.0
83 90 - URL: http://127.0.0.1:10003
84 91
85 92 - NAME: vcsserver-1
86 93 - STATUS: RUNNING
87 94 - TYPE: VCSServer
88 - VERSION: 3.3.0
95 - VERSION: 4.1.0
89 96 - URL: http://127.0.0.1:10001
90 97
91 98 .. note::
92 99
93 100 Recommended post quick start install instructions:
94 101
95 102 * Read the documentation
96 103 * Carry out the :ref:`rhodecode-post-instal-ref`
97 104 * Set up :ref:`indexing-ref`
98 105 * Familiarise yourself with the :ref:`rhodecode-admin-ref` section.
99 106
100 107 .. _rhodecode.com/download/: https://rhodecode.com/download/
101 108 .. _rhodecode.com: https://rhodecode.com/
102 109 .. _rhodecode.com/register: https://rhodecode.com/register/
@@ -1,58 +1,64 b''
1 1 |RCE| 4.0.0 |RNS|
2 2 -----------------
3 3
4 4 Release Date
5 5 ^^^^^^^^^^^^
6 6
7 7 - 2016-05-24
8 8
9 9 General
10 10 ^^^^^^^
11 11
12 12 - Introduced Pyramid as a Pylons framework replacement (porting is still ongoing).
13 13 Added a few components as plugins. Exposed the RhodeCode plugins API for 3rd
14 14 parties to extend RhodeCode functionality with custom pyramid apps. Pyramid
15 15 is also our route to python3 support.
16 16 - Various UX/UI improvements.
17 17 - new summary page
18 18 - new file browser (more consistent)
19 19 - re-done admin section and added Panels
20 20 - various other tweaks and improvements
21 21 - Alternative fast and scalable HTTP-based communication backend for VCSServer.
22 22 It will soon replace Pyro4.
23 23 - Rewrote a few of the caching techniques used and simplified them
24 24
25 25
26 26 New Features
27 27 ^^^^^^^^^^^^
28 28
29 29 - RhodeCode code-review live chat (EE only). A live communication
30 30 tool built right into the code-review process to quickly
31 31 collaborate on crucial parts of code.
32 32
33 33 - Elastic Search backend (EE only). An alternative backend to the existing
34 34 Whoosh to handle large amounts of data for full text search.
35 35
36 36 - Social Auth (EE only): added new social authentication backends including:
37 37 Github, Twitter, Bitbucket and Google. It's possible now to use your
38 38 Google account to log in to RhodeCode and take advantage of things like 2FA.
39 39
40 - Search: full text search now properly orders commits by date, and shows line
41 numbers for file content search.
42
43
40 44 Security
41 45 ^^^^^^^^
42 46
43 47 - Added new action loggers for actions like adding/revoking permissions.
44 48
45 49
46 50 Performance
47 51 ^^^^^^^^^^^
48 52
49 - Optimized admin pannels to faster load large ammount of data
53 - Optimized admin panels to faster load large amount of data
50 54 - Improved file tree loading speed
55 - New HTTP backend is ~10% faster, and doesn't require as many threads
56 for the vcsserver
51 57
52 58
53 59 Fixes
54 60 ^^^^^
55 61
56 62 - Fixed backreferences to user group when deleting users
57 63 - Fixed LDAP group user-group matching
58 64 - Improved SVN support for various commands (MKOL, etc) No newline at end of file
@@ -1,76 +1,80 b''
1 1 .. _rhodecode-release-notes-ref:
2 2
3 3 Release Notes
4 4 =============
5 5
6 6 |RCE| 4.x Versions
7 7 ------------------
8 8
9 .. toctree::
10 :maxdepth: 1
11
12 release-notes-4.1.0.rst
9 13 release-notes-4.0.1.rst
10 14 release-notes-4.0.0.rst
11 15
12 16 |RCE| 3.x Versions
13 17 ------------------
14 18
15 19 .. toctree::
16 20 :maxdepth: 1
17 21
18 22 release-notes-3.8.4.rst
19 23 release-notes-3.8.3.rst
20 24 release-notes-3.8.2.rst
21 25 release-notes-3.8.1.rst
22 26 release-notes-3.8.0.rst
23 27 release-notes-3.7.1.rst
24 28 release-notes-3.7.0.rst
25 29 release-notes-3.6.1.rst
26 30 release-notes-3.6.0.rst
27 31 release-notes-3.5.2.rst
28 32 release-notes-3.5.1.rst
29 33 release-notes-3.5.0.rst
30 34 release-notes-3.4.1.rst
31 35 release-notes-3.4.0.rst
32 36 release-notes-3.3.4.rst
33 37 release-notes-3.3.3.rst
34 38 release-notes-3.3.2.rst
35 39 release-notes-3.3.1.rst
36 40 release-notes-3.3.0.rst
37 41 release-notes-3.2.3.rst
38 42 release-notes-3.2.2.rst
39 43 release-notes-3.2.1.rst
40 44 release-notes-3.2.0.rst
41 45 release-notes-3.1.1.rst
42 46 release-notes-3.1.0.rst
43 47 release-notes-3.0.2.rst
44 48 release-notes-3.0.1.rst
45 49 release-notes-3.0.0.rst
46 50
47 51 |RCE| 2.x Versions
48 52 ------------------
49 53
50 54 .. toctree::
51 55 :maxdepth: 1
52 56
53 57 release-notes-2.2.8.rst
54 58 release-notes-2.2.7.rst
55 59 release-notes-2.2.6.rst
56 60 release-notes-2.2.5.rst
57 61 release-notes-2.2.4.rst
58 62 release-notes-2.2.3.rst
59 63 release-notes-2.2.2.rst
60 64 release-notes-2.2.1.rst
61 65 release-notes-2.2.0.rst
62 66 release-notes-2.1.0.rst
63 67 release-notes-2.0.2.rst
64 68 release-notes-2.0.1.rst
65 69 release-notes-2.0.0.rst
66 70
67 71 |RCE| 1.x Versions
68 72 ------------------
69 73
70 74 .. toctree::
71 75 :maxdepth: 1
72 76
73 77 release-notes-1.7.2.rst
74 78 release-notes-1.7.1.rst
75 79 release-notes-1.7.0.rst
76 80 release-notes-1.6.0.rst
@@ -1,13 +1,12 b''
1 1 diff --git a/requirements.txt b/requirements.txt
2 2 --- a/requirements.txt
3 3 +++ b/requirements.txt
4 @@ -1,8 +1,8 @@
5 click==5.1
6 future==0.14.3
4 @@ -3,7 +3,7 @@future==0.14.3
7 5 six==1.9.0
8 6 mako==1.0.1
9 7 markupsafe==0.23
10 8 -requests==2.5.1
11 9 +requests
10 #responses
12 11 whoosh==2.7.0
13 pyelasticsearch==1.4
12 elasticsearch==2.3.0 No newline at end of file
@@ -1,151 +1,165 b''
1 1 # Overrides for the generated python-packages.nix
2 2 #
3 3 # This function is intended to be used as an extension to the generated file
4 4 # python-packages.nix. The main objective is to add needed dependencies of C
5 5 # libraries and tweak the build instructions where needed.
6 6
7 7 { pkgs, basePythonPackages }:
8 8
9 9 let
10 10 sed = "sed -i";
11 11 in
12 12
13 13 self: super: {
14 14
15 15 gnureadline = super.gnureadline.override (attrs: {
16 16 buildInputs = attrs.buildInputs ++ [
17 17 pkgs.ncurses
18 18 ];
19 19 patchPhase = ''
20 20 substituteInPlace setup.py --replace "/bin/bash" "${pkgs.bash}/bin/bash"
21 21 '';
22 22 });
23 23
24 gunicorn = super.gunicorn.override (attrs: {
25 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
26 # johbo: futures is needed as long as we are on Python 2, otherwise
27 # gunicorn explodes if used with multiple threads per worker.
28 self.futures
29 ];
30 });
31
32 ipython = super.ipython.override (attrs: {
33 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
34 self.gnureadline
35 ];
36 });
37
24 38 kombu = super.kombu.override (attrs: {
25 39 # The current version of kombu needs some patching to work with the
26 40 # other libs. Should be removed once we update celery and kombu.
27 41 patches = [
28 42 ./patch-kombu-py-2-7-11.diff
29 43 ./patch-kombu-msgpack.diff
30 44 ];
31 45 });
32 46
33 47 lxml = super.lxml.override (attrs: {
34 48 buildInputs = with self; [
35 49 pkgs.libxml2
36 50 pkgs.libxslt
37 51 ];
38 52 });
39 53
40 54 MySQL-python = super.MySQL-python.override (attrs: {
41 55 buildInputs = attrs.buildInputs ++ [
42 56 pkgs.openssl
43 57 ];
44 58 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
45 59 pkgs.mysql.lib
46 60 pkgs.zlib
47 61 ];
48 62 });
49 63
50 64 psutil = super.psutil.override (attrs: {
51 65 buildInputs = attrs.buildInputs ++
52 66 pkgs.lib.optional pkgs.stdenv.isDarwin pkgs.darwin.IOKit;
53 67 });
54 68
55 69 psycopg2 = super.psycopg2.override (attrs: {
56 70 buildInputs = attrs.buildInputs ++
57 71 pkgs.lib.optional pkgs.stdenv.isDarwin pkgs.openssl;
58 72 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
59 73 pkgs.postgresql
60 74 ];
61 75 });
62 76
63 77 pycurl = super.pycurl.override (attrs: {
64 78 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
65 79 pkgs.curl
66 80 pkgs.openssl
67 81 ];
68 82 preConfigure = ''
69 83 substituteInPlace setup.py --replace '--static-libs' '--libs'
70 84 export PYCURL_SSL_LIBRARY=openssl
71 85 '';
72 86 });
73 87
74 88 Pylons = super.Pylons.override (attrs: {
75 89 name = "Pylons-1.0.1-patch1";
76 90 src = pkgs.fetchgit {
77 91 url = "https://code.rhodecode.com/upstream/pylons";
78 92 rev = "707354ee4261b9c10450404fc9852ccea4fd667d";
79 93 sha256 = "b2763274c2780523a335f83a1df65be22ebe4ff413a7bc9e9288d23c1f62032e";
80 94 };
81 95 });
82 96
83 97 pyramid = super.pyramid.override (attrs: {
84 98 postFixup = ''
85 99 wrapPythonPrograms
86 100 # TODO: johbo: "wrapPython" adds this magic line which
87 101 # confuses pserve.
88 102 ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped
89 103 '';
90 104 });
91 105
92 106 Pyro4 = super.Pyro4.override (attrs: {
93 107 # TODO: Was not able to generate this version, needs further
94 108 # investigation.
95 109 name = "Pyro4-4.35";
96 110 src = pkgs.fetchurl {
97 111 url = "https://pypi.python.org/packages/source/P/Pyro4/Pyro4-4.35.src.tar.gz";
98 112 md5 = "cbe6cb855f086a0f092ca075005855f3";
99 113 };
100 114 });
101 115
102 116 pysqlite = super.pysqlite.override (attrs: {
103 117 propagatedBuildInputs = [
104 118 pkgs.sqlite
105 119 ];
106 120 });
107 121
108 122 pytest-runner = super.pytest-runner.override (attrs: {
109 123 propagatedBuildInputs = [
110 124 self.setuptools-scm
111 125 ];
112 126 });
113 127
114 128 python-ldap = super.python-ldap.override (attrs: {
115 129 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
116 130 pkgs.cyrus_sasl
117 131 pkgs.openldap
118 132 pkgs.openssl
119 133 ];
120 134 NIX_CFLAGS_COMPILE = "-I${pkgs.cyrus_sasl}/include/sasl";
121 135 });
122 136
123 137 python-pam = super.python-pam.override (attrs:
124 138 let
125 139 includeLibPam = pkgs.stdenv.isLinux;
126 140 in {
127 141 # TODO: johbo: Move the option up into the default.nix, we should
128 142 # include python-pam only on supported platforms.
129 143 propagatedBuildInputs = attrs.propagatedBuildInputs ++
130 144 pkgs.lib.optional includeLibPam [
131 145 pkgs.pam
132 146 ];
133 147 # TODO: johbo: Check if this can be avoided, or transform into
134 148 # a real patch
135 149 patchPhase = pkgs.lib.optionals includeLibPam ''
136 150 substituteInPlace pam.py \
137 151 --replace 'find_library("pam")' '"${pkgs.pam}/lib/libpam.so.0"'
138 152 '';
139 153 });
140 154
141 155 rhodecode-tools = super.rhodecode-tools.override (attrs: {
142 156 patches = [
143 157 ./patch-rhodecode-tools-setup.diff
144 158 ];
145 159 });
146 160
147 161 # Avoid that setuptools is replaced, this leads to trouble
148 162 # with buildPythonPackage.
149 163 setuptools = basePythonPackages.setuptools;
150 164
151 165 }
@@ -1,1273 +1,1263 b''
1 1 {
2 2 Babel = super.buildPythonPackage {
3 3 name = "Babel-1.3";
4 4 buildInputs = with self; [];
5 5 doCheck = false;
6 6 propagatedBuildInputs = with self; [pytz];
7 7 src = fetchurl {
8 8 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
9 9 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
10 10 };
11 11 };
12 12 Beaker = super.buildPythonPackage {
13 13 name = "Beaker-1.7.0";
14 14 buildInputs = with self; [];
15 15 doCheck = false;
16 16 propagatedBuildInputs = with self; [];
17 17 src = fetchurl {
18 18 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
19 19 md5 = "386be3f7fe427358881eee4622b428b3";
20 20 };
21 21 };
22 22 CProfileV = super.buildPythonPackage {
23 23 name = "CProfileV-1.0.6";
24 24 buildInputs = with self; [];
25 25 doCheck = false;
26 26 propagatedBuildInputs = with self; [bottle];
27 27 src = fetchurl {
28 28 url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
29 29 md5 = "08c7c242b6e64237bc53c5d13537e03d";
30 30 };
31 31 };
32 32 Fabric = super.buildPythonPackage {
33 33 name = "Fabric-1.10.0";
34 34 buildInputs = with self; [];
35 35 doCheck = false;
36 36 propagatedBuildInputs = with self; [paramiko];
37 37 src = fetchurl {
38 38 url = "https://pypi.python.org/packages/e3/5f/b6ebdb5241d5ec9eab582a5c8a01255c1107da396f849e538801d2fe64a5/Fabric-1.10.0.tar.gz";
39 39 md5 = "2cb96473387f0e7aa035210892352f4a";
40 40 };
41 41 };
42 42 FormEncode = super.buildPythonPackage {
43 43 name = "FormEncode-1.2.4";
44 44 buildInputs = with self; [];
45 45 doCheck = false;
46 46 propagatedBuildInputs = with self; [];
47 47 src = fetchurl {
48 48 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
49 49 md5 = "6bc17fb9aed8aea198975e888e2077f4";
50 50 };
51 51 };
52 52 Jinja2 = super.buildPythonPackage {
53 53 name = "Jinja2-2.7.3";
54 54 buildInputs = with self; [];
55 55 doCheck = false;
56 56 propagatedBuildInputs = with self; [MarkupSafe];
57 57 src = fetchurl {
58 58 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
59 59 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
60 60 };
61 61 };
62 62 Mako = super.buildPythonPackage {
63 63 name = "Mako-1.0.1";
64 64 buildInputs = with self; [];
65 65 doCheck = false;
66 66 propagatedBuildInputs = with self; [MarkupSafe];
67 67 src = fetchurl {
68 68 url = "https://pypi.python.org/packages/8e/a4/aa56533ecaa5f22ca92428f74e074d0c9337282933c722391902c8f9e0f8/Mako-1.0.1.tar.gz";
69 69 md5 = "9f0aafd177b039ef67b90ea350497a54";
70 70 };
71 71 };
72 72 Markdown = super.buildPythonPackage {
73 73 name = "Markdown-2.6.2";
74 74 buildInputs = with self; [];
75 75 doCheck = false;
76 76 propagatedBuildInputs = with self; [];
77 77 src = fetchurl {
78 78 url = "https://pypi.python.org/packages/62/8b/83658b5f6c220d5fcde9f9852d46ea54765d734cfbc5a9f4c05bfc36db4d/Markdown-2.6.2.tar.gz";
79 79 md5 = "256d19afcc564dc4ce4c229bb762f7ae";
80 80 };
81 81 };
82 82 MarkupSafe = super.buildPythonPackage {
83 83 name = "MarkupSafe-0.23";
84 84 buildInputs = with self; [];
85 85 doCheck = false;
86 86 propagatedBuildInputs = with self; [];
87 87 src = fetchurl {
88 88 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
89 89 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
90 90 };
91 91 };
92 92 MySQL-python = super.buildPythonPackage {
93 93 name = "MySQL-python-1.2.5";
94 94 buildInputs = with self; [];
95 95 doCheck = false;
96 96 propagatedBuildInputs = with self; [];
97 97 src = fetchurl {
98 98 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
99 99 md5 = "654f75b302db6ed8dc5a898c625e030c";
100 100 };
101 101 };
102 102 Paste = super.buildPythonPackage {
103 103 name = "Paste-2.0.2";
104 104 buildInputs = with self; [];
105 105 doCheck = false;
106 106 propagatedBuildInputs = with self; [six];
107 107 src = fetchurl {
108 108 url = "https://pypi.python.org/packages/d5/8d/0f8ac40687b97ff3e07ebd1369be20bdb3f93864d2dc3c2ff542edb4ce50/Paste-2.0.2.tar.gz";
109 109 md5 = "4bfc8a7eaf858f6309d2ac0f40fc951c";
110 110 };
111 111 };
112 112 PasteDeploy = super.buildPythonPackage {
113 113 name = "PasteDeploy-1.5.2";
114 114 buildInputs = with self; [];
115 115 doCheck = false;
116 116 propagatedBuildInputs = with self; [];
117 117 src = fetchurl {
118 118 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
119 119 md5 = "352b7205c78c8de4987578d19431af3b";
120 120 };
121 121 };
122 122 PasteScript = super.buildPythonPackage {
123 123 name = "PasteScript-1.7.5";
124 124 buildInputs = with self; [];
125 125 doCheck = false;
126 126 propagatedBuildInputs = with self; [Paste PasteDeploy];
127 127 src = fetchurl {
128 128 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
129 129 md5 = "4c72d78dcb6bb993f30536842c16af4d";
130 130 };
131 131 };
132 132 Pygments = super.buildPythonPackage {
133 133 name = "Pygments-2.0.2";
134 134 buildInputs = with self; [];
135 135 doCheck = false;
136 136 propagatedBuildInputs = with self; [];
137 137 src = fetchurl {
138 138 url = "https://pypi.python.org/packages/f4/c6/bdbc5a8a112256b2b6136af304dbae93d8b1ef8738ff2d12a51018800e46/Pygments-2.0.2.tar.gz";
139 139 md5 = "238587a1370d62405edabd0794b3ec4a";
140 140 };
141 141 };
142 142 Pylons = super.buildPythonPackage {
143 143 name = "Pylons-1.0.1";
144 144 buildInputs = with self; [];
145 145 doCheck = false;
146 146 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
147 147 src = fetchurl {
148 148 url = "https://pypi.python.org/packages/a2/69/b835a6bad00acbfeed3f33c6e44fa3f936efc998c795bfb15c61a79ecf62/Pylons-1.0.1.tar.gz";
149 149 md5 = "6cb880d75fa81213192142b07a6e4915";
150 150 };
151 151 };
152 152 Pyro4 = super.buildPythonPackage {
153 153 name = "Pyro4-4.41";
154 154 buildInputs = with self; [];
155 155 doCheck = false;
156 156 propagatedBuildInputs = with self; [serpent];
157 157 src = fetchurl {
158 158 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
159 159 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
160 160 };
161 161 };
162 162 Routes = super.buildPythonPackage {
163 163 name = "Routes-1.13";
164 164 buildInputs = with self; [];
165 165 doCheck = false;
166 166 propagatedBuildInputs = with self; [repoze.lru];
167 167 src = fetchurl {
168 168 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
169 169 md5 = "d527b0ab7dd9172b1275a41f97448783";
170 170 };
171 171 };
172 172 SQLAlchemy = super.buildPythonPackage {
173 173 name = "SQLAlchemy-0.9.9";
174 174 buildInputs = with self; [];
175 175 doCheck = false;
176 176 propagatedBuildInputs = with self; [];
177 177 src = fetchurl {
178 178 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
179 179 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
180 180 };
181 181 };
182 182 Sphinx = super.buildPythonPackage {
183 183 name = "Sphinx-1.2.2";
184 184 buildInputs = with self; [];
185 185 doCheck = false;
186 186 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
187 187 src = fetchurl {
188 188 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
189 189 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
190 190 };
191 191 };
192 192 Tempita = super.buildPythonPackage {
193 193 name = "Tempita-0.5.2";
194 194 buildInputs = with self; [];
195 195 doCheck = false;
196 196 propagatedBuildInputs = with self; [];
197 197 src = fetchurl {
198 198 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
199 199 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
200 200 };
201 201 };
202 202 URLObject = super.buildPythonPackage {
203 203 name = "URLObject-2.4.0";
204 204 buildInputs = with self; [];
205 205 doCheck = false;
206 206 propagatedBuildInputs = with self; [];
207 207 src = fetchurl {
208 208 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
209 209 md5 = "2ed819738a9f0a3051f31dc9924e3065";
210 210 };
211 211 };
212 212 WebError = super.buildPythonPackage {
213 213 name = "WebError-0.10.3";
214 214 buildInputs = with self; [];
215 215 doCheck = false;
216 216 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
217 217 src = fetchurl {
218 218 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
219 219 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
220 220 };
221 221 };
222 222 WebHelpers = super.buildPythonPackage {
223 223 name = "WebHelpers-1.3";
224 224 buildInputs = with self; [];
225 225 doCheck = false;
226 226 propagatedBuildInputs = with self; [MarkupSafe];
227 227 src = fetchurl {
228 228 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
229 229 md5 = "32749ffadfc40fea51075a7def32588b";
230 230 };
231 231 };
232 232 WebHelpers2 = super.buildPythonPackage {
233 233 name = "WebHelpers2-2.0";
234 234 buildInputs = with self; [];
235 235 doCheck = false;
236 236 propagatedBuildInputs = with self; [MarkupSafe six];
237 237 src = fetchurl {
238 238 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
239 239 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
240 240 };
241 241 };
242 242 WebOb = super.buildPythonPackage {
243 243 name = "WebOb-1.3.1";
244 244 buildInputs = with self; [];
245 245 doCheck = false;
246 246 propagatedBuildInputs = with self; [];
247 247 src = fetchurl {
248 248 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
249 249 md5 = "20918251c5726956ba8fef22d1556177";
250 250 };
251 251 };
252 252 WebTest = super.buildPythonPackage {
253 253 name = "WebTest-1.4.3";
254 254 buildInputs = with self; [];
255 255 doCheck = false;
256 256 propagatedBuildInputs = with self; [WebOb];
257 257 src = fetchurl {
258 258 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
259 259 md5 = "631ce728bed92c681a4020a36adbc353";
260 260 };
261 261 };
262 262 Whoosh = super.buildPythonPackage {
263 263 name = "Whoosh-2.7.0";
264 264 buildInputs = with self; [];
265 265 doCheck = false;
266 266 propagatedBuildInputs = with self; [];
267 267 src = fetchurl {
268 268 url = "https://pypi.python.org/packages/1c/dc/2f0231ff3875ded36df8c1ab851451e51a237dc0e5a86d3d96036158da94/Whoosh-2.7.0.zip";
269 269 md5 = "7abfd970f16fadc7311960f3fa0bc7a9";
270 270 };
271 271 };
272 272 alembic = super.buildPythonPackage {
273 273 name = "alembic-0.8.4";
274 274 buildInputs = with self; [];
275 275 doCheck = false;
276 276 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
277 277 src = fetchurl {
278 278 url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
279 279 md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
280 280 };
281 281 };
282 282 amqplib = super.buildPythonPackage {
283 283 name = "amqplib-1.0.2";
284 284 buildInputs = with self; [];
285 285 doCheck = false;
286 286 propagatedBuildInputs = with self; [];
287 287 src = fetchurl {
288 288 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
289 289 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
290 290 };
291 291 };
292 292 anyjson = super.buildPythonPackage {
293 293 name = "anyjson-0.3.3";
294 294 buildInputs = with self; [];
295 295 doCheck = false;
296 296 propagatedBuildInputs = with self; [];
297 297 src = fetchurl {
298 298 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
299 299 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
300 300 };
301 301 };
302 302 appenlight-client = super.buildPythonPackage {
303 303 name = "appenlight-client-0.6.14";
304 304 buildInputs = with self; [];
305 305 doCheck = false;
306 306 propagatedBuildInputs = with self; [WebOb requests];
307 307 src = fetchurl {
308 308 url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
309 309 md5 = "578c69b09f4356d898fff1199b98a95c";
310 310 };
311 311 };
312 312 authomatic = super.buildPythonPackage {
313 313 name = "authomatic-0.1.0.post1";
314 314 buildInputs = with self; [];
315 315 doCheck = false;
316 316 propagatedBuildInputs = with self; [];
317 317 src = fetchurl {
318 318 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
319 319 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
320 320 };
321 321 };
322 322 backport-ipaddress = super.buildPythonPackage {
323 323 name = "backport-ipaddress-0.1";
324 324 buildInputs = with self; [];
325 325 doCheck = false;
326 326 propagatedBuildInputs = with self; [];
327 327 src = fetchurl {
328 328 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
329 329 md5 = "9c1f45f4361f71b124d7293a60006c05";
330 330 };
331 331 };
332 332 bottle = super.buildPythonPackage {
333 333 name = "bottle-0.12.8";
334 334 buildInputs = with self; [];
335 335 doCheck = false;
336 336 propagatedBuildInputs = with self; [];
337 337 src = fetchurl {
338 338 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
339 339 md5 = "13132c0a8f607bf860810a6ee9064c5b";
340 340 };
341 341 };
342 342 bumpversion = super.buildPythonPackage {
343 343 name = "bumpversion-0.5.3";
344 344 buildInputs = with self; [];
345 345 doCheck = false;
346 346 propagatedBuildInputs = with self; [];
347 347 src = fetchurl {
348 348 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
349 349 md5 = "c66a3492eafcf5ad4b024be9fca29820";
350 350 };
351 351 };
352 352 celery = super.buildPythonPackage {
353 353 name = "celery-2.2.10";
354 354 buildInputs = with self; [];
355 355 doCheck = false;
356 356 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
357 357 src = fetchurl {
358 358 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
359 359 md5 = "898bc87e54f278055b561316ba73e222";
360 360 };
361 361 };
362 certifi = super.buildPythonPackage {
363 name = "certifi-2016.2.28";
364 buildInputs = with self; [];
365 doCheck = false;
366 propagatedBuildInputs = with self; [];
367 src = fetchurl {
368 url = "https://pypi.python.org/packages/5c/f8/f6c54727c74579c6bbe5926f5deb9677c5810a33e11da58d1a4e2d09d041/certifi-2016.2.28.tar.gz";
369 md5 = "5d672aa766e1f773c75cfeccd02d3650";
370 };
371 };
372 362 click = super.buildPythonPackage {
373 363 name = "click-5.1";
374 364 buildInputs = with self; [];
375 365 doCheck = false;
376 366 propagatedBuildInputs = with self; [];
377 367 src = fetchurl {
378 368 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
379 369 md5 = "9c5323008cccfe232a8b161fc8196d41";
380 370 };
381 371 };
382 372 colander = super.buildPythonPackage {
383 373 name = "colander-1.2";
384 374 buildInputs = with self; [];
385 375 doCheck = false;
386 376 propagatedBuildInputs = with self; [translationstring iso8601];
387 377 src = fetchurl {
388 378 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
389 379 md5 = "83db21b07936a0726e588dae1914b9ed";
390 380 };
391 381 };
392 382 configobj = super.buildPythonPackage {
393 383 name = "configobj-5.0.6";
394 384 buildInputs = with self; [];
395 385 doCheck = false;
396 386 propagatedBuildInputs = with self; [six];
397 387 src = fetchurl {
398 388 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
399 389 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
400 390 };
401 391 };
402 392 cov-core = super.buildPythonPackage {
403 393 name = "cov-core-1.15.0";
404 394 buildInputs = with self; [];
405 395 doCheck = false;
406 396 propagatedBuildInputs = with self; [coverage];
407 397 src = fetchurl {
408 398 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
409 399 md5 = "f519d4cb4c4e52856afb14af52919fe6";
410 400 };
411 401 };
412 402 coverage = super.buildPythonPackage {
413 403 name = "coverage-3.7.1";
414 404 buildInputs = with self; [];
415 405 doCheck = false;
416 406 propagatedBuildInputs = with self; [];
417 407 src = fetchurl {
418 408 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
419 409 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
420 410 };
421 411 };
422 412 cssselect = super.buildPythonPackage {
423 413 name = "cssselect-0.9.1";
424 414 buildInputs = with self; [];
425 415 doCheck = false;
426 416 propagatedBuildInputs = with self; [];
427 417 src = fetchurl {
428 418 url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
429 419 md5 = "c74f45966277dc7a0f768b9b0f3522ac";
430 420 };
431 421 };
432 422 decorator = super.buildPythonPackage {
433 423 name = "decorator-3.4.2";
434 424 buildInputs = with self; [];
435 425 doCheck = false;
436 426 propagatedBuildInputs = with self; [];
437 427 src = fetchurl {
438 428 url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
439 429 md5 = "9e0536870d2b83ae27d58dbf22582f4d";
440 430 };
441 431 };
442 432 docutils = super.buildPythonPackage {
443 433 name = "docutils-0.12";
444 434 buildInputs = with self; [];
445 435 doCheck = false;
446 436 propagatedBuildInputs = with self; [];
447 437 src = fetchurl {
448 438 url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
449 439 md5 = "4622263b62c5c771c03502afa3157768";
450 440 };
451 441 };
452 442 dogpile.cache = super.buildPythonPackage {
453 443 name = "dogpile.cache-0.5.7";
454 444 buildInputs = with self; [];
455 445 doCheck = false;
456 446 propagatedBuildInputs = with self; [dogpile.core];
457 447 src = fetchurl {
458 448 url = "https://pypi.python.org/packages/07/74/2a83bedf758156d9c95d112691bbad870d3b77ccbcfb781b4ef836ea7d96/dogpile.cache-0.5.7.tar.gz";
459 449 md5 = "3e58ce41af574aab41d78e9c4190f194";
460 450 };
461 451 };
462 452 dogpile.core = super.buildPythonPackage {
463 453 name = "dogpile.core-0.4.1";
464 454 buildInputs = with self; [];
465 455 doCheck = false;
466 456 propagatedBuildInputs = with self; [];
467 457 src = fetchurl {
468 458 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
469 459 md5 = "01cb19f52bba3e95c9b560f39341f045";
470 460 };
471 461 };
472 462 dulwich = super.buildPythonPackage {
473 463 name = "dulwich-0.12.0";
474 464 buildInputs = with self; [];
475 465 doCheck = false;
476 466 propagatedBuildInputs = with self; [];
477 467 src = fetchurl {
478 468 url = "https://pypi.python.org/packages/6f/04/fbe561b6d45c0ec758330d5b7f5ba4b6cb4f1ca1ab49859d2fc16320da75/dulwich-0.12.0.tar.gz";
479 469 md5 = "f3a8a12bd9f9dd8c233e18f3d49436fa";
480 470 };
481 471 };
482 472 ecdsa = super.buildPythonPackage {
483 473 name = "ecdsa-0.11";
484 474 buildInputs = with self; [];
485 475 doCheck = false;
486 476 propagatedBuildInputs = with self; [];
487 477 src = fetchurl {
488 478 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
489 479 md5 = "8ef586fe4dbb156697d756900cb41d7c";
490 480 };
491 481 };
492 482 elasticsearch = super.buildPythonPackage {
493 name = "elasticsearch-1.9.0";
483 name = "elasticsearch-2.3.0";
494 484 buildInputs = with self; [];
495 485 doCheck = false;
496 486 propagatedBuildInputs = with self; [urllib3];
497 487 src = fetchurl {
498 url = "https://pypi.python.org/packages/13/9b/540e311b31a10c2a904acfb08030c656047e5c7ba479d35df2799e5dccfe/elasticsearch-1.9.0.tar.gz";
499 md5 = "3550390baea1639479f79758d66ab032";
488 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
489 md5 = "2550f3b51629cf1ef9636608af92c340";
490 };
491 };
492 elasticsearch-dsl = super.buildPythonPackage {
493 name = "elasticsearch-dsl-2.0.0";
494 buildInputs = with self; [];
495 doCheck = false;
496 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
497 src = fetchurl {
498 url = "https://pypi.python.org/packages/4e/5d/e788ae8dbe2ff4d13426db0a027533386a5c276c77a2654dc0e2007ce04a/elasticsearch-dsl-2.0.0.tar.gz";
499 md5 = "4cdfec81bb35383dd3b7d02d7dc5ee68";
500 500 };
501 501 };
502 502 flake8 = super.buildPythonPackage {
503 503 name = "flake8-2.4.1";
504 504 buildInputs = with self; [];
505 505 doCheck = false;
506 506 propagatedBuildInputs = with self; [pyflakes pep8 mccabe];
507 507 src = fetchurl {
508 508 url = "https://pypi.python.org/packages/8f/b5/9a73c66c7dba273bac8758398f060c008a25f3e84531063b42503b5d0a95/flake8-2.4.1.tar.gz";
509 509 md5 = "ed45d3db81a3b7c88bd63c6e37ca1d65";
510 510 };
511 511 };
512 512 future = super.buildPythonPackage {
513 513 name = "future-0.14.3";
514 514 buildInputs = with self; [];
515 515 doCheck = false;
516 516 propagatedBuildInputs = with self; [];
517 517 src = fetchurl {
518 518 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
519 519 md5 = "e94079b0bd1fc054929e8769fc0f6083";
520 520 };
521 521 };
522 522 futures = super.buildPythonPackage {
523 523 name = "futures-3.0.2";
524 524 buildInputs = with self; [];
525 525 doCheck = false;
526 526 propagatedBuildInputs = with self; [];
527 527 src = fetchurl {
528 528 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
529 529 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
530 530 };
531 531 };
532 532 gnureadline = super.buildPythonPackage {
533 533 name = "gnureadline-6.3.3";
534 534 buildInputs = with self; [];
535 535 doCheck = false;
536 536 propagatedBuildInputs = with self; [];
537 537 src = fetchurl {
538 538 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
539 539 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
540 540 };
541 541 };
542 542 gprof2dot = super.buildPythonPackage {
543 name = "gprof2dot-2015.12.1";
543 name = "gprof2dot-2015.12.01";
544 544 buildInputs = with self; [];
545 545 doCheck = false;
546 546 propagatedBuildInputs = with self; [];
547 547 src = fetchurl {
548 548 url = "https://pypi.python.org/packages/b9/34/7bf93c1952d40fa5c95ad963f4d8344b61ef58558632402eca18e6c14127/gprof2dot-2015.12.1.tar.gz";
549 549 md5 = "e23bf4e2f94db032750c193384b4165b";
550 550 };
551 551 };
552 552 greenlet = super.buildPythonPackage {
553 name = "greenlet-0.4.7";
553 name = "greenlet-0.4.9";
554 554 buildInputs = with self; [];
555 555 doCheck = false;
556 556 propagatedBuildInputs = with self; [];
557 557 src = fetchurl {
558 url = "https://pypi.python.org/packages/7a/9f/a1a0d9bdf3203ae1502c5a8434fe89d323599d78a106985bc327351a69d4/greenlet-0.4.7.zip";
559 md5 = "c2333a8ff30fa75c5d5ec0e67b461086";
558 url = "https://pypi.python.org/packages/4e/3d/9d421539b74e33608b245092870156b2e171fb49f2b51390aa4641eecb4a/greenlet-0.4.9.zip";
559 md5 = "c6659cdb2a5e591723e629d2eef22e82";
560 560 };
561 561 };
562 562 gunicorn = super.buildPythonPackage {
563 563 name = "gunicorn-19.6.0";
564 564 buildInputs = with self; [];
565 565 doCheck = false;
566 566 propagatedBuildInputs = with self; [];
567 567 src = fetchurl {
568 568 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
569 569 md5 = "338e5e8a83ea0f0625f768dba4597530";
570 570 };
571 571 };
572 572 infrae.cache = super.buildPythonPackage {
573 573 name = "infrae.cache-1.0.1";
574 574 buildInputs = with self; [];
575 575 doCheck = false;
576 576 propagatedBuildInputs = with self; [Beaker repoze.lru];
577 577 src = fetchurl {
578 578 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
579 579 md5 = "b09076a766747e6ed2a755cc62088e32";
580 580 };
581 581 };
582 582 invoke = super.buildPythonPackage {
583 583 name = "invoke-0.11.1";
584 584 buildInputs = with self; [];
585 585 doCheck = false;
586 586 propagatedBuildInputs = with self; [];
587 587 src = fetchurl {
588 588 url = "https://pypi.python.org/packages/d3/bb/36a5558ea19882073def7b0edeef4a0e6282056fed96506dd10b1d532bd4/invoke-0.11.1.tar.gz";
589 589 md5 = "3d4ecbe26779ceef1046ecf702c9c4a8";
590 590 };
591 591 };
592 592 ipdb = super.buildPythonPackage {
593 593 name = "ipdb-0.8";
594 594 buildInputs = with self; [];
595 595 doCheck = false;
596 596 propagatedBuildInputs = with self; [ipython];
597 597 src = fetchurl {
598 598 url = "https://pypi.python.org/packages/f0/25/d7dd430ced6cd8dc242a933c8682b5dbf32eb4011d82f87e34209e5ec845/ipdb-0.8.zip";
599 599 md5 = "96dca0712efa01aa5eaf6b22071dd3ed";
600 600 };
601 601 };
602 602 ipython = super.buildPythonPackage {
603 603 name = "ipython-3.1.0";
604 604 buildInputs = with self; [];
605 605 doCheck = false;
606 propagatedBuildInputs = with self; [gnureadline];
606 propagatedBuildInputs = with self; [];
607 607 src = fetchurl {
608 608 url = "https://pypi.python.org/packages/06/91/120c0835254c120af89f066afaabf81289bc2726c1fc3ca0555df6882f58/ipython-3.1.0.tar.gz";
609 609 md5 = "a749d90c16068687b0ec45a27e72ef8f";
610 610 };
611 611 };
612 612 iso8601 = super.buildPythonPackage {
613 613 name = "iso8601-0.1.11";
614 614 buildInputs = with self; [];
615 615 doCheck = false;
616 616 propagatedBuildInputs = with self; [];
617 617 src = fetchurl {
618 618 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
619 619 md5 = "b06d11cd14a64096f907086044f0fe38";
620 620 };
621 621 };
622 622 itsdangerous = super.buildPythonPackage {
623 623 name = "itsdangerous-0.24";
624 624 buildInputs = with self; [];
625 625 doCheck = false;
626 626 propagatedBuildInputs = with self; [];
627 627 src = fetchurl {
628 628 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
629 629 md5 = "a3d55aa79369aef5345c036a8a26307f";
630 630 };
631 631 };
632 632 kombu = super.buildPythonPackage {
633 633 name = "kombu-1.5.1";
634 634 buildInputs = with self; [];
635 635 doCheck = false;
636 636 propagatedBuildInputs = with self; [anyjson amqplib];
637 637 src = fetchurl {
638 638 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
639 639 md5 = "50662f3c7e9395b3d0721fb75d100b63";
640 640 };
641 641 };
642 642 lxml = super.buildPythonPackage {
643 643 name = "lxml-3.4.4";
644 644 buildInputs = with self; [];
645 645 doCheck = false;
646 646 propagatedBuildInputs = with self; [];
647 647 src = fetchurl {
648 648 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
649 649 md5 = "a9a65972afc173ec7a39c585f4eea69c";
650 650 };
651 651 };
652 652 mccabe = super.buildPythonPackage {
653 653 name = "mccabe-0.3";
654 654 buildInputs = with self; [];
655 655 doCheck = false;
656 656 propagatedBuildInputs = with self; [];
657 657 src = fetchurl {
658 658 url = "https://pypi.python.org/packages/c9/2e/75231479e11a906b64ac43bad9d0bb534d00080b18bdca8db9da46e1faf7/mccabe-0.3.tar.gz";
659 659 md5 = "81640948ff226f8c12b3277059489157";
660 660 };
661 661 };
662 662 meld3 = super.buildPythonPackage {
663 663 name = "meld3-1.0.2";
664 664 buildInputs = with self; [];
665 665 doCheck = false;
666 666 propagatedBuildInputs = with self; [];
667 667 src = fetchurl {
668 668 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
669 669 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
670 670 };
671 671 };
672 672 mock = super.buildPythonPackage {
673 673 name = "mock-1.0.1";
674 674 buildInputs = with self; [];
675 675 doCheck = false;
676 676 propagatedBuildInputs = with self; [];
677 677 src = fetchurl {
678 678 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
679 679 md5 = "869f08d003c289a97c1a6610faf5e913";
680 680 };
681 681 };
682 682 msgpack-python = super.buildPythonPackage {
683 683 name = "msgpack-python-0.4.6";
684 684 buildInputs = with self; [];
685 685 doCheck = false;
686 686 propagatedBuildInputs = with self; [];
687 687 src = fetchurl {
688 688 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
689 689 md5 = "8b317669314cf1bc881716cccdaccb30";
690 690 };
691 691 };
692 692 nose = super.buildPythonPackage {
693 693 name = "nose-1.3.6";
694 694 buildInputs = with self; [];
695 695 doCheck = false;
696 696 propagatedBuildInputs = with self; [];
697 697 src = fetchurl {
698 698 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
699 699 md5 = "0ca546d81ca8309080fc80cb389e7a16";
700 700 };
701 701 };
702 702 objgraph = super.buildPythonPackage {
703 703 name = "objgraph-2.0.0";
704 704 buildInputs = with self; [];
705 705 doCheck = false;
706 706 propagatedBuildInputs = with self; [];
707 707 src = fetchurl {
708 708 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
709 709 md5 = "25b0d5e5adc74aa63ead15699614159c";
710 710 };
711 711 };
712 712 packaging = super.buildPythonPackage {
713 713 name = "packaging-15.2";
714 714 buildInputs = with self; [];
715 715 doCheck = false;
716 716 propagatedBuildInputs = with self; [];
717 717 src = fetchurl {
718 718 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
719 719 md5 = "c16093476f6ced42128bf610e5db3784";
720 720 };
721 721 };
722 722 paramiko = super.buildPythonPackage {
723 723 name = "paramiko-1.15.1";
724 724 buildInputs = with self; [];
725 725 doCheck = false;
726 726 propagatedBuildInputs = with self; [pycrypto ecdsa];
727 727 src = fetchurl {
728 728 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
729 729 md5 = "48c274c3f9b1282932567b21f6acf3b5";
730 730 };
731 731 };
732 732 pep8 = super.buildPythonPackage {
733 733 name = "pep8-1.5.7";
734 734 buildInputs = with self; [];
735 735 doCheck = false;
736 736 propagatedBuildInputs = with self; [];
737 737 src = fetchurl {
738 738 url = "https://pypi.python.org/packages/8b/de/259f5e735897ada1683489dd514b2a1c91aaa74e5e6b68f80acf128a6368/pep8-1.5.7.tar.gz";
739 739 md5 = "f6adbdd69365ecca20513c709f9b7c93";
740 740 };
741 741 };
742 742 psutil = super.buildPythonPackage {
743 743 name = "psutil-2.2.1";
744 744 buildInputs = with self; [];
745 745 doCheck = false;
746 746 propagatedBuildInputs = with self; [];
747 747 src = fetchurl {
748 748 url = "https://pypi.python.org/packages/df/47/ee54ef14dd40f8ce831a7581001a5096494dc99fe71586260ca6b531fe86/psutil-2.2.1.tar.gz";
749 749 md5 = "1a2b58cd9e3a53528bb6148f0c4d5244";
750 750 };
751 751 };
752 752 psycopg2 = super.buildPythonPackage {
753 753 name = "psycopg2-2.6";
754 754 buildInputs = with self; [];
755 755 doCheck = false;
756 756 propagatedBuildInputs = with self; [];
757 757 src = fetchurl {
758 758 url = "https://pypi.python.org/packages/dd/c7/9016ff8ff69da269b1848276eebfb264af5badf6b38caad805426771f04d/psycopg2-2.6.tar.gz";
759 759 md5 = "fbbb039a8765d561a1c04969bbae7c74";
760 760 };
761 761 };
762 762 py = super.buildPythonPackage {
763 763 name = "py-1.4.29";
764 764 buildInputs = with self; [];
765 765 doCheck = false;
766 766 propagatedBuildInputs = with self; [];
767 767 src = fetchurl {
768 768 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
769 769 md5 = "c28e0accba523a29b35a48bb703fb96c";
770 770 };
771 771 };
772 772 py-bcrypt = super.buildPythonPackage {
773 773 name = "py-bcrypt-0.4";
774 774 buildInputs = with self; [];
775 775 doCheck = false;
776 776 propagatedBuildInputs = with self; [];
777 777 src = fetchurl {
778 778 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
779 779 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
780 780 };
781 781 };
782 782 pycrypto = super.buildPythonPackage {
783 783 name = "pycrypto-2.6.1";
784 784 buildInputs = with self; [];
785 785 doCheck = false;
786 786 propagatedBuildInputs = with self; [];
787 787 src = fetchurl {
788 788 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
789 789 md5 = "55a61a054aa66812daf5161a0d5d7eda";
790 790 };
791 791 };
792 792 pycurl = super.buildPythonPackage {
793 793 name = "pycurl-7.19.5";
794 794 buildInputs = with self; [];
795 795 doCheck = false;
796 796 propagatedBuildInputs = with self; [];
797 797 src = fetchurl {
798 798 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
799 799 md5 = "47b4eac84118e2606658122104e62072";
800 800 };
801 801 };
802 pyelasticsearch = super.buildPythonPackage {
803 name = "pyelasticsearch-1.4";
804 buildInputs = with self; [];
805 doCheck = false;
806 propagatedBuildInputs = with self; [certifi elasticsearch urllib3 simplejson six];
807 src = fetchurl {
808 url = "https://pypi.python.org/packages/2f/3a/7643cfcfc4cbdbb20ada800bbd54ac9705d0c047d7b8f8d5eeeb3047b4eb/pyelasticsearch-1.4.tar.gz";
809 md5 = "ed61ebb7b253364e55b4923d11e17049";
810 };
811 };
812 802 pyflakes = super.buildPythonPackage {
813 803 name = "pyflakes-0.8.1";
814 804 buildInputs = with self; [];
815 805 doCheck = false;
816 806 propagatedBuildInputs = with self; [];
817 807 src = fetchurl {
818 808 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
819 809 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
820 810 };
821 811 };
822 812 pyparsing = super.buildPythonPackage {
823 813 name = "pyparsing-1.5.7";
824 814 buildInputs = with self; [];
825 815 doCheck = false;
826 816 propagatedBuildInputs = with self; [];
827 817 src = fetchurl {
828 818 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
829 819 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
830 820 };
831 821 };
832 822 pyramid = super.buildPythonPackage {
833 823 name = "pyramid-1.6.1";
834 824 buildInputs = with self; [];
835 825 doCheck = false;
836 826 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
837 827 src = fetchurl {
838 828 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
839 829 md5 = "b18688ff3cc33efdbb098a35b45dd122";
840 830 };
841 831 };
842 832 pyramid-beaker = super.buildPythonPackage {
843 833 name = "pyramid-beaker-0.8";
844 834 buildInputs = with self; [];
845 835 doCheck = false;
846 836 propagatedBuildInputs = with self; [pyramid Beaker];
847 837 src = fetchurl {
848 838 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
849 839 md5 = "22f14be31b06549f80890e2c63a93834";
850 840 };
851 841 };
852 842 pyramid-debugtoolbar = super.buildPythonPackage {
853 843 name = "pyramid-debugtoolbar-2.4.2";
854 844 buildInputs = with self; [];
855 845 doCheck = false;
856 846 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
857 847 src = fetchurl {
858 848 url = "https://pypi.python.org/packages/89/00/ed5426ee41ed747ba3ffd30e8230841a6878286ea67d480b1444d24f06a2/pyramid_debugtoolbar-2.4.2.tar.gz";
859 849 md5 = "073ea67086cc4bd5decc3a000853642d";
860 850 };
861 851 };
862 852 pyramid-jinja2 = super.buildPythonPackage {
863 853 name = "pyramid-jinja2-2.5";
864 854 buildInputs = with self; [];
865 855 doCheck = false;
866 856 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
867 857 src = fetchurl {
868 858 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
869 859 md5 = "07cb6547204ac5e6f0b22a954ccee928";
870 860 };
871 861 };
872 862 pyramid-mako = super.buildPythonPackage {
873 863 name = "pyramid-mako-1.0.2";
874 864 buildInputs = with self; [];
875 865 doCheck = false;
876 866 propagatedBuildInputs = with self; [pyramid Mako];
877 867 src = fetchurl {
878 868 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
879 869 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
880 870 };
881 871 };
882 872 pysqlite = super.buildPythonPackage {
883 873 name = "pysqlite-2.6.3";
884 874 buildInputs = with self; [];
885 875 doCheck = false;
886 876 propagatedBuildInputs = with self; [];
887 877 src = fetchurl {
888 878 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
889 879 md5 = "7ff1cedee74646b50117acff87aa1cfa";
890 880 };
891 881 };
892 882 pytest = super.buildPythonPackage {
893 883 name = "pytest-2.8.5";
894 884 buildInputs = with self; [];
895 885 doCheck = false;
896 886 propagatedBuildInputs = with self; [py];
897 887 src = fetchurl {
898 888 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
899 889 md5 = "8493b06f700862f1294298d6c1b715a9";
900 890 };
901 891 };
902 892 pytest-catchlog = super.buildPythonPackage {
903 893 name = "pytest-catchlog-1.2.2";
904 894 buildInputs = with self; [];
905 895 doCheck = false;
906 896 propagatedBuildInputs = with self; [py pytest];
907 897 src = fetchurl {
908 898 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
909 899 md5 = "09d890c54c7456c818102b7ff8c182c8";
910 900 };
911 901 };
912 902 pytest-cov = super.buildPythonPackage {
913 903 name = "pytest-cov-1.8.1";
914 904 buildInputs = with self; [];
915 905 doCheck = false;
916 906 propagatedBuildInputs = with self; [py pytest coverage cov-core];
917 907 src = fetchurl {
918 908 url = "https://pypi.python.org/packages/11/4b/b04646e97f1721878eb21e9f779102d84dd044d324382263b1770a3e4838/pytest-cov-1.8.1.tar.gz";
919 909 md5 = "76c778afa2494088270348be42d759fc";
920 910 };
921 911 };
922 912 pytest-profiling = super.buildPythonPackage {
923 913 name = "pytest-profiling-1.0.1";
924 914 buildInputs = with self; [];
925 915 doCheck = false;
926 916 propagatedBuildInputs = with self; [six pytest gprof2dot];
927 917 src = fetchurl {
928 918 url = "https://pypi.python.org/packages/d8/67/8ffab73406e22870e07fa4dc8dce1d7689b26dba8efd00161c9b6fc01ec0/pytest-profiling-1.0.1.tar.gz";
929 919 md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b";
930 920 };
931 921 };
932 922 pytest-runner = super.buildPythonPackage {
933 923 name = "pytest-runner-2.7.1";
934 924 buildInputs = with self; [];
935 925 doCheck = false;
936 926 propagatedBuildInputs = with self; [];
937 927 src = fetchurl {
938 928 url = "https://pypi.python.org/packages/99/6b/c4ff4418d3424d4475b7af60724fd4a5cdd91ed8e489dc9443281f0052bc/pytest-runner-2.7.1.tar.gz";
939 929 md5 = "e56f0bc8d79a6bd91772b44ef4215c7e";
940 930 };
941 931 };
942 932 pytest-timeout = super.buildPythonPackage {
943 933 name = "pytest-timeout-0.4";
944 934 buildInputs = with self; [];
945 935 doCheck = false;
946 936 propagatedBuildInputs = with self; [pytest];
947 937 src = fetchurl {
948 938 url = "https://pypi.python.org/packages/24/48/5f6bd4b8026a26e1dd427243d560a29a0f1b24a5c7cffca4bf049a7bb65b/pytest-timeout-0.4.tar.gz";
949 939 md5 = "03b28aff69cbbfb959ed35ade5fde262";
950 940 };
951 941 };
952 942 python-dateutil = super.buildPythonPackage {
953 943 name = "python-dateutil-1.5";
954 944 buildInputs = with self; [];
955 945 doCheck = false;
956 946 propagatedBuildInputs = with self; [];
957 947 src = fetchurl {
958 948 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
959 949 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
960 950 };
961 951 };
962 952 python-editor = super.buildPythonPackage {
963 953 name = "python-editor-1.0";
964 954 buildInputs = with self; [];
965 955 doCheck = false;
966 956 propagatedBuildInputs = with self; [];
967 957 src = fetchurl {
968 958 url = "https://pypi.python.org/packages/f5/d9/01eb441489c8bd2adb33ee4f3aea299a3db531a584cb39c57a0ecf516d9c/python-editor-1.0.tar.gz";
969 959 md5 = "a5ead611360b17b52507297d8590b4e8";
970 960 };
971 961 };
972 962 python-ldap = super.buildPythonPackage {
973 963 name = "python-ldap-2.4.19";
974 964 buildInputs = with self; [];
975 965 doCheck = false;
976 966 propagatedBuildInputs = with self; [setuptools];
977 967 src = fetchurl {
978 968 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
979 969 md5 = "b941bf31d09739492aa19ef679e94ae3";
980 970 };
981 971 };
982 972 python-memcached = super.buildPythonPackage {
983 973 name = "python-memcached-1.57";
984 974 buildInputs = with self; [];
985 975 doCheck = false;
986 976 propagatedBuildInputs = with self; [six];
987 977 src = fetchurl {
988 978 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
989 979 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
990 980 };
991 981 };
992 982 python-pam = super.buildPythonPackage {
993 983 name = "python-pam-1.8.2";
994 984 buildInputs = with self; [];
995 985 doCheck = false;
996 986 propagatedBuildInputs = with self; [];
997 987 src = fetchurl {
998 988 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
999 989 md5 = "db71b6b999246fb05d78ecfbe166629d";
1000 990 };
1001 991 };
1002 992 pytz = super.buildPythonPackage {
1003 993 name = "pytz-2015.4";
1004 994 buildInputs = with self; [];
1005 995 doCheck = false;
1006 996 propagatedBuildInputs = with self; [];
1007 997 src = fetchurl {
1008 998 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1009 999 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1010 1000 };
1011 1001 };
1012 1002 pyzmq = super.buildPythonPackage {
1013 1003 name = "pyzmq-14.6.0";
1014 1004 buildInputs = with self; [];
1015 1005 doCheck = false;
1016 1006 propagatedBuildInputs = with self; [];
1017 1007 src = fetchurl {
1018 1008 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1019 1009 md5 = "395b5de95a931afa5b14c9349a5b8024";
1020 1010 };
1021 1011 };
1022 1012 recaptcha-client = super.buildPythonPackage {
1023 1013 name = "recaptcha-client-1.0.6";
1024 1014 buildInputs = with self; [];
1025 1015 doCheck = false;
1026 1016 propagatedBuildInputs = with self; [];
1027 1017 src = fetchurl {
1028 1018 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1029 1019 md5 = "74228180f7e1fb76c4d7089160b0d919";
1030 1020 };
1031 1021 };
1032 1022 repoze.lru = super.buildPythonPackage {
1033 1023 name = "repoze.lru-0.6";
1034 1024 buildInputs = with self; [];
1035 1025 doCheck = false;
1036 1026 propagatedBuildInputs = with self; [];
1037 1027 src = fetchurl {
1038 1028 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1039 1029 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1040 1030 };
1041 1031 };
1042 1032 requests = super.buildPythonPackage {
1043 1033 name = "requests-2.9.1";
1044 1034 buildInputs = with self; [];
1045 1035 doCheck = false;
1046 1036 propagatedBuildInputs = with self; [];
1047 1037 src = fetchurl {
1048 1038 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1049 1039 md5 = "0b7f480d19012ec52bab78292efd976d";
1050 1040 };
1051 1041 };
1052 1042 rhodecode-enterprise-ce = super.buildPythonPackage {
1053 name = "rhodecode-enterprise-ce-4.0.1";
1043 name = "rhodecode-enterprise-ce-4.1.0";
1054 1044 buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner];
1055 1045 doCheck = true;
1056 1046 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors psutil py-bcrypt];
1057 1047 src = ./.;
1058 1048 };
1059 1049 rhodecode-tools = super.buildPythonPackage {
1060 name = "rhodecode-tools-0.7.1";
1050 name = "rhodecode-tools-0.8.3";
1061 1051 buildInputs = with self; [];
1062 1052 doCheck = false;
1063 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh pyelasticsearch];
1053 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl];
1064 1054 src = fetchurl {
1065 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.7.1.zip";
1066 md5 = "91daea803aaa264ce7a8213bc2220d4c";
1055 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip";
1056 md5 = "9acdfd71b8ddf4056057065f37ab9ccb";
1067 1057 };
1068 1058 };
1069 1059 serpent = super.buildPythonPackage {
1070 1060 name = "serpent-1.12";
1071 1061 buildInputs = with self; [];
1072 1062 doCheck = false;
1073 1063 propagatedBuildInputs = with self; [];
1074 1064 src = fetchurl {
1075 1065 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
1076 1066 md5 = "05869ac7b062828b34f8f927f0457b65";
1077 1067 };
1078 1068 };
1079 1069 setproctitle = super.buildPythonPackage {
1080 1070 name = "setproctitle-1.1.8";
1081 1071 buildInputs = with self; [];
1082 1072 doCheck = false;
1083 1073 propagatedBuildInputs = with self; [];
1084 1074 src = fetchurl {
1085 1075 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1086 1076 md5 = "728f4c8c6031bbe56083a48594027edd";
1087 1077 };
1088 1078 };
1089 1079 setuptools = super.buildPythonPackage {
1090 1080 name = "setuptools-20.8.1";
1091 1081 buildInputs = with self; [];
1092 1082 doCheck = false;
1093 1083 propagatedBuildInputs = with self; [];
1094 1084 src = fetchurl {
1095 1085 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
1096 1086 md5 = "fe58a5cac0df20bb83942b252a4b0543";
1097 1087 };
1098 1088 };
1099 1089 setuptools-scm = super.buildPythonPackage {
1100 1090 name = "setuptools-scm-1.11.0";
1101 1091 buildInputs = with self; [];
1102 1092 doCheck = false;
1103 1093 propagatedBuildInputs = with self; [];
1104 1094 src = fetchurl {
1105 1095 url = "https://pypi.python.org/packages/cd/5f/e3a038292358058d83d764a47d09114aa5a8003ed4529518f9e580f1a94f/setuptools_scm-1.11.0.tar.gz";
1106 1096 md5 = "4c5c896ba52e134bbc3507bac6400087";
1107 1097 };
1108 1098 };
1109 1099 simplejson = super.buildPythonPackage {
1110 1100 name = "simplejson-3.7.2";
1111 1101 buildInputs = with self; [];
1112 1102 doCheck = false;
1113 1103 propagatedBuildInputs = with self; [];
1114 1104 src = fetchurl {
1115 1105 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1116 1106 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1117 1107 };
1118 1108 };
1119 1109 six = super.buildPythonPackage {
1120 1110 name = "six-1.9.0";
1121 1111 buildInputs = with self; [];
1122 1112 doCheck = false;
1123 1113 propagatedBuildInputs = with self; [];
1124 1114 src = fetchurl {
1125 1115 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1126 1116 md5 = "476881ef4012262dfc8adc645ee786c4";
1127 1117 };
1128 1118 };
1129 1119 subprocess32 = super.buildPythonPackage {
1130 1120 name = "subprocess32-3.2.6";
1131 1121 buildInputs = with self; [];
1132 1122 doCheck = false;
1133 1123 propagatedBuildInputs = with self; [];
1134 1124 src = fetchurl {
1135 1125 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1136 1126 md5 = "754c5ab9f533e764f931136974b618f1";
1137 1127 };
1138 1128 };
1139 1129 supervisor = super.buildPythonPackage {
1140 1130 name = "supervisor-3.1.3";
1141 1131 buildInputs = with self; [];
1142 1132 doCheck = false;
1143 1133 propagatedBuildInputs = with self; [meld3];
1144 1134 src = fetchurl {
1145 1135 url = "https://pypi.python.org/packages/a6/41/65ad5bd66230b173eb4d0b8810230f3a9c59ef52ae066e540b6b99895db7/supervisor-3.1.3.tar.gz";
1146 1136 md5 = "aad263c4fbc070de63dd354864d5e552";
1147 1137 };
1148 1138 };
1149 1139 transifex-client = super.buildPythonPackage {
1150 1140 name = "transifex-client-0.10";
1151 1141 buildInputs = with self; [];
1152 1142 doCheck = false;
1153 1143 propagatedBuildInputs = with self; [];
1154 1144 src = fetchurl {
1155 1145 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1156 1146 md5 = "5549538d84b8eede6b254cd81ae024fa";
1157 1147 };
1158 1148 };
1159 1149 translationstring = super.buildPythonPackage {
1160 1150 name = "translationstring-1.3";
1161 1151 buildInputs = with self; [];
1162 1152 doCheck = false;
1163 1153 propagatedBuildInputs = with self; [];
1164 1154 src = fetchurl {
1165 1155 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1166 1156 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1167 1157 };
1168 1158 };
1169 1159 trollius = super.buildPythonPackage {
1170 1160 name = "trollius-1.0.4";
1171 1161 buildInputs = with self; [];
1172 1162 doCheck = false;
1173 1163 propagatedBuildInputs = with self; [futures];
1174 1164 src = fetchurl {
1175 1165 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1176 1166 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1177 1167 };
1178 1168 };
1179 1169 uWSGI = super.buildPythonPackage {
1180 1170 name = "uWSGI-2.0.11.2";
1181 1171 buildInputs = with self; [];
1182 1172 doCheck = false;
1183 1173 propagatedBuildInputs = with self; [];
1184 1174 src = fetchurl {
1185 1175 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1186 1176 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1187 1177 };
1188 1178 };
1189 1179 urllib3 = super.buildPythonPackage {
1190 1180 name = "urllib3-1.15.1";
1191 1181 buildInputs = with self; [];
1192 1182 doCheck = false;
1193 1183 propagatedBuildInputs = with self; [];
1194 1184 src = fetchurl {
1195 1185 url = "https://pypi.python.org/packages/49/26/a7d12ea00cb4b9fa1e13b5980e5a04a1fe7c477eb8f657ce0b757a7a497d/urllib3-1.15.1.tar.gz";
1196 1186 md5 = "5be254b0dbb55d1307ede99e1895c8dd";
1197 1187 };
1198 1188 };
1199 1189 venusian = super.buildPythonPackage {
1200 1190 name = "venusian-1.0";
1201 1191 buildInputs = with self; [];
1202 1192 doCheck = false;
1203 1193 propagatedBuildInputs = with self; [];
1204 1194 src = fetchurl {
1205 1195 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1206 1196 md5 = "dccf2eafb7113759d60c86faf5538756";
1207 1197 };
1208 1198 };
1209 1199 waitress = super.buildPythonPackage {
1210 1200 name = "waitress-0.8.9";
1211 1201 buildInputs = with self; [];
1212 1202 doCheck = false;
1213 1203 propagatedBuildInputs = with self; [setuptools];
1214 1204 src = fetchurl {
1215 1205 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
1216 1206 md5 = "da3f2e62b3676be5dd630703a68e2a04";
1217 1207 };
1218 1208 };
1219 1209 wsgiref = super.buildPythonPackage {
1220 1210 name = "wsgiref-0.1.2";
1221 1211 buildInputs = with self; [];
1222 1212 doCheck = false;
1223 1213 propagatedBuildInputs = with self; [];
1224 1214 src = fetchurl {
1225 1215 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1226 1216 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1227 1217 };
1228 1218 };
1229 1219 zope.cachedescriptors = super.buildPythonPackage {
1230 1220 name = "zope.cachedescriptors-4.0.0";
1231 1221 buildInputs = with self; [];
1232 1222 doCheck = false;
1233 1223 propagatedBuildInputs = with self; [setuptools];
1234 1224 src = fetchurl {
1235 1225 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1236 1226 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1237 1227 };
1238 1228 };
1239 1229 zope.deprecation = super.buildPythonPackage {
1240 1230 name = "zope.deprecation-4.1.2";
1241 1231 buildInputs = with self; [];
1242 1232 doCheck = false;
1243 1233 propagatedBuildInputs = with self; [setuptools];
1244 1234 src = fetchurl {
1245 1235 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1246 1236 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1247 1237 };
1248 1238 };
1249 1239 zope.event = super.buildPythonPackage {
1250 1240 name = "zope.event-4.0.3";
1251 1241 buildInputs = with self; [];
1252 1242 doCheck = false;
1253 1243 propagatedBuildInputs = with self; [setuptools];
1254 1244 src = fetchurl {
1255 1245 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1256 1246 md5 = "9a3780916332b18b8b85f522bcc3e249";
1257 1247 };
1258 1248 };
1259 1249 zope.interface = super.buildPythonPackage {
1260 1250 name = "zope.interface-4.1.3";
1261 1251 buildInputs = with self; [];
1262 1252 doCheck = false;
1263 1253 propagatedBuildInputs = with self; [setuptools];
1264 1254 src = fetchurl {
1265 1255 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1266 1256 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1267 1257 };
1268 1258 };
1269 1259
1270 1260 ### Test requirements
1271 1261
1272 1262
1273 1263 }
@@ -1,151 +1,151 b''
1 1 Babel==1.3
2 2 Beaker==1.7.0
3 3 CProfileV==1.0.6
4 4 Fabric==1.10.0
5 5 FormEncode==1.2.4
6 6 Jinja2==2.7.3
7 7 Mako==1.0.1
8 8 Markdown==2.6.2
9 9 MarkupSafe==0.23
10 10 MySQL-python==1.2.5
11 11 Paste==2.0.2
12 12 PasteDeploy==1.5.2
13 13 PasteScript==1.7.5
14 pyelasticsearch==1.4
15 14 Pygments==2.0.2
16 15
17 16 # TODO: This version is not available on PyPI
18 17 # Pylons==1.0.2.dev20160108
19 18 Pylons==1.0.1
20 19
21 20 # TODO: This version is not available, but newer ones are
22 21 # Pyro4==4.35
23 22 Pyro4==4.41
24 23
25 24 # TODO: This should probably not be in here
26 25 # -e hg+https://johbo@code.rhodecode.com/johbo/rhodecode-fork@3a454bd1f17c0b2b2a951cf2b111e0320d7942a9#egg=RhodeCodeEnterprise-dev
27 26
28 27 # TODO: This is not really a dependency, we should add it only
29 28 # into the development environment, since there it is useful.
30 29 # RhodeCodeVCSServer==3.9.0
31 30
32 31 Routes==1.13
33 32 SQLAlchemy==0.9.9
34 33 Sphinx==1.2.2
35 34 Tempita==0.5.2
36 35 URLObject==2.4.0
37 36 WebError==0.10.3
38 37
39 38 # TODO: This is modified by us, needs a better integration. For now
40 39 # using the latest version before.
41 40 # WebHelpers==1.3.dev20150807
42 41 WebHelpers==1.3
43 42
44 43 WebHelpers2==2.0
45 44 WebOb==1.3.1
46 45 WebTest==1.4.3
47 46 Whoosh==2.7.0
48 47 alembic==0.8.4
49 48 amqplib==1.0.2
50 49 anyjson==0.3.3
51 50 appenlight-client==0.6.14
52 51 authomatic==0.1.0.post1;
53 52 backport-ipaddress==0.1
54 53 bottle==0.12.8
55 54 bumpversion==0.5.3
56 55 celery==2.2.10
57 56 click==5.1
58 57 colander==1.2
59 58 configobj==5.0.6
60 59 cov-core==1.15.0
61 60 coverage==3.7.1
62 61 cssselect==0.9.1
63 62 decorator==3.4.2
64 63 docutils==0.12
65 64 dogpile.cache==0.5.7
66 65 dogpile.core==0.4.1
67 66 dulwich==0.12.0
68 67 ecdsa==0.11
69 68 flake8==2.4.1
70 69 future==0.14.3
71 70 futures==3.0.2
72 71 gprof2dot==2015.12.1
73 greenlet==0.4.7
72 greenlet==0.4.9
74 73 gunicorn==19.6.0
75 74
76 75 # TODO: Needs subvertpy and blows up without Subversion headers,
77 76 # actually we should not need this for Enterprise at all.
78 77 # hgsubversion==1.8.2
79 78
79 gnureadline==6.3.3
80 80 infrae.cache==1.0.1
81 81 invoke==0.11.1
82 82 ipdb==0.8
83 83 ipython==3.1.0
84 84 iso8601==0.1.11
85 85 itsdangerous==0.24
86 86 kombu==1.5.1
87 87 lxml==3.4.4
88 88 mccabe==0.3
89 89 meld3==1.0.2
90 90 mock==1.0.1
91 91 msgpack-python==0.4.6
92 92 nose==1.3.6
93 93 objgraph==2.0.0
94 94 packaging==15.2
95 95 paramiko==1.15.1
96 96 pep8==1.5.7
97 97 psutil==2.2.1
98 98 psycopg2==2.6
99 99 py==1.4.29
100 100 py-bcrypt==0.4
101 101 pycrypto==2.6.1
102 102 pycurl==7.19.5
103 103 pyflakes==0.8.1
104 104 pyparsing==1.5.7
105 105 pyramid==1.6.1
106 106 pyramid-beaker==0.8
107 107 pyramid-debugtoolbar==2.4.2
108 108 pyramid-jinja2==2.5
109 109 pyramid-mako==1.0.2
110 110 pysqlite==2.6.3
111 111 pytest==2.8.5
112 112 pytest-runner==2.7.1
113 113 pytest-catchlog==1.2.2
114 114 pytest-cov==1.8.1
115 115 pytest-profiling==1.0.1
116 116 pytest-timeout==0.4
117 117 python-dateutil==1.5
118 118 python-ldap==2.4.19
119 119 python-memcached==1.57
120 120 python-pam==1.8.2
121 121 pytz==2015.4
122 122 pyzmq==14.6.0
123 123
124 124 # TODO: This is not available in public
125 125 # rc-testdata==0.2.0
126 126
127 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.7.1.zip#md5=91daea803aaa264ce7a8213bc2220d4c
127 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb
128 128
129 129
130 130 recaptcha-client==1.0.6
131 131 repoze.lru==0.6
132 132 requests==2.9.1
133 133 serpent==1.12
134 134 setproctitle==1.1.8
135 135 setuptools==20.8.1
136 136 setuptools-scm==1.11.0
137 137 simplejson==3.7.2
138 138 six==1.9.0
139 139 subprocess32==3.2.6
140 140 supervisor==3.1.3
141 141 transifex-client==0.10
142 142 translationstring==1.3
143 143 trollius==1.0.4
144 144 uWSGI==2.0.11.2
145 145 venusian==1.0
146 146 waitress==0.8.9
147 147 wsgiref==0.1.2
148 148 zope.cachedescriptors==4.0.0
149 149 zope.deprecation==4.1.2
150 150 zope.event==4.0.3
151 151 zope.interface==4.1.3
@@ -1,1 +1,1 b''
1 4.0.1 No newline at end of file
1 4.1.0 No newline at end of file
@@ -1,58 +1,58 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22
23 23 RhodeCode, a web based repository management software
24 24 versioning implementation: http://www.python.org/dev/peps/pep-0386/
25 25 """
26 26
27 27 import os
28 28 import sys
29 29 import platform
30 30
31 31 VERSION = tuple(open(os.path.join(
32 32 os.path.dirname(__file__), 'VERSION')).read().split('.'))
33 33
34 34 BACKENDS = {
35 35 'hg': 'Mercurial repository',
36 36 'git': 'Git repository',
37 37 'svn': 'Subversion repository',
38 38 }
39 39
40 40 CELERY_ENABLED = False
41 41 CELERY_EAGER = False
42 42
43 43 # link to config for pylons
44 44 CONFIG = {}
45 45
46 46 # Linked module for extensions
47 47 EXTENSIONS = {}
48 48
49 49 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
50 __dbversion__ = 51 # defines current db version for migrations
50 __dbversion__ = 54 # defines current db version for migrations
51 51 __platform__ = platform.system()
52 52 __license__ = 'AGPLv3, and Commercial License'
53 53 __author__ = 'RhodeCode GmbH'
54 54 __url__ = 'http://rhodecode.com'
55 55
56 56 is_windows = __platform__ in ['Windows']
57 57 is_unix = not is_windows
58 58 is_test = False
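
The version constants above are derived from the plain-text VERSION file: the file content is split on dots, so every component of VERSION stays a string, and __version__ simply rejoins the first three components. A minimal sketch of that behaviour, with an inline string standing in for the file read:

.. code-block:: python

    # Sketch of the VERSION handling shown in rhodecode/__init__.py above.
    # The inline string stands in for the contents of the VERSION file.
    raw = '4.1.0'
    VERSION = tuple(raw.split('.'))   # ('4', '1', '0') - strings, not ints
    __version__ = '.'.join(str(each) for each in VERSION[:3])

    assert VERSION == ('4', '1', '0')
    assert __version__ == '4.1.0'
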
@@ -1,144 +1,158 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.model.repo import RepoModel
25 25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
26 26 from rhodecode.api.tests.utils import (
27 build_data, api_call, assert_error, assert_ok, crash)
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 28 from rhodecode.tests.fixture import Fixture
29 29
30 30
31 31 fixture = Fixture()
32 32
33 UPDATE_REPO_NAME = 'api_update_me'
34
35 class SAME_AS_UPDATES(object): """ Constant used for tests below """
33 36
34 37 @pytest.mark.usefixtures("testuser_api", "app")
35 38 class TestApiUpdateRepo(object):
36 @pytest.mark.parametrize("changing_attr, updates", [
37 ('owner', {'owner': TEST_USER_REGULAR_LOGIN}),
38 ('description', {'description': 'new description'}),
39 ('active', {'active': True}),
40 ('active', {'active': False}),
41 ('clone_uri', {'clone_uri': 'http://foo.com/repo'}),
42 ('clone_uri', {'clone_uri': None}),
43 ('landing_rev', {'landing_rev': 'branch:master'}),
44 ('enable_statistics', {'enable_statistics': True}),
45 ('enable_locking', {'enable_locking': True}),
46 ('enable_downloads', {'enable_downloads': True}),
47 ('name', {'name': 'new_repo_name'}),
48 ('repo_group', {'group': 'test_group_for_update'}),
39
40 @pytest.mark.parametrize("updates, expected", [
41 ({'owner': TEST_USER_REGULAR_LOGIN}, SAME_AS_UPDATES),
42 ({'description': 'new description'}, SAME_AS_UPDATES),
43 ({'clone_uri': 'http://foo.com/repo'}, SAME_AS_UPDATES),
44 ({'clone_uri': None}, {'clone_uri': ''}),
45 ({'clone_uri': ''}, {'clone_uri': ''}),
46 ({'landing_rev': 'branch:master'}, {'landing_rev': ['branch','master']}),
47 ({'enable_statistics': True}, SAME_AS_UPDATES),
48 ({'enable_locking': True}, SAME_AS_UPDATES),
49 ({'enable_downloads': True}, SAME_AS_UPDATES),
50 ({'name': 'new_repo_name'}, {'repo_name': 'new_repo_name'}),
51 ({'group': 'test_group_for_update'},
52 {'repo_name': 'test_group_for_update/%s' % UPDATE_REPO_NAME}),
49 53 ])
50 def test_api_update_repo(self, changing_attr, updates, backend):
51 repo_name = 'api_update_me'
54 def test_api_update_repo(self, updates, expected, backend):
55 repo_name = UPDATE_REPO_NAME
52 56 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
53 if changing_attr == 'repo_group':
57 if updates.get('group'):
54 58 fixture.create_repo_group(updates['group'])
55 59
60 expected_api_data = repo.get_api_data(include_secrets=True)
61 if expected is SAME_AS_UPDATES:
62 expected_api_data.update(updates)
63 else:
64 expected_api_data.update(expected)
65
66
56 67 id_, params = build_data(
57 68 self.apikey, 'update_repo', repoid=repo_name, **updates)
58 69 response = api_call(self.app, params)
59 if changing_attr == 'name':
70
71 if updates.get('name'):
60 72 repo_name = updates['name']
61 if changing_attr == 'repo_group':
73 if updates.get('group'):
62 74 repo_name = '/'.join([updates['group'], repo_name])
75
63 76 try:
64 77 expected = {
65 78 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
66 'repository': repo.get_api_data(include_secrets=True)
79 'repository': jsonify(expected_api_data)
67 80 }
68 81 assert_ok(id_, expected, given=response.body)
69 82 finally:
70 83 fixture.destroy_repo(repo_name)
71 if changing_attr == 'repo_group':
72
84 if updates.get('group'):
73 85 fixture.destroy_repo_group(updates['group'])
74 86
75 87 def test_api_update_repo_fork_of_field(self, backend):
76 88 master_repo = backend.create_repo()
77 89 repo = backend.create_repo()
78
79 90 updates = {
80 91 'fork_of': master_repo.repo_name
81 92 }
93 expected_api_data = repo.get_api_data(include_secrets=True)
94 expected_api_data.update(updates)
95
82 96 id_, params = build_data(
83 97 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
84 98 response = api_call(self.app, params)
85 99 expected = {
86 100 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
87 'repository': repo.get_api_data(include_secrets=True)
101 'repository': jsonify(expected_api_data)
88 102 }
89 103 assert_ok(id_, expected, given=response.body)
90 104 result = response.json['result']['repository']
91 105 assert result['fork_of'] == master_repo.repo_name
92 106
93 107 def test_api_update_repo_fork_of_not_found(self, backend):
94 108 master_repo_name = 'fake-parent-repo'
95 109 repo = backend.create_repo()
96 110 updates = {
97 111 'fork_of': master_repo_name
98 112 }
99 113 id_, params = build_data(
100 114 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
101 115 response = api_call(self.app, params)
102 116 expected = 'repository `{}` does not exist'.format(master_repo_name)
103 117 assert_error(id_, expected, given=response.body)
104 118
105 119 def test_api_update_repo_with_repo_group_not_existing(self):
106 120 repo_name = 'admin_owned'
107 121 fixture.create_repo(repo_name)
108 122 updates = {'group': 'test_group_for_update'}
109 123 id_, params = build_data(
110 124 self.apikey, 'update_repo', repoid=repo_name, **updates)
111 125 response = api_call(self.app, params)
112 126 try:
113 127 expected = 'repository group `%s` does not exist' % (
114 128 updates['group'],)
115 129 assert_error(id_, expected, given=response.body)
116 130 finally:
117 131 fixture.destroy_repo(repo_name)
118 132
119 133 def test_api_update_repo_regular_user_not_allowed(self):
120 134 repo_name = 'admin_owned'
121 135 fixture.create_repo(repo_name)
122 136 updates = {'active': False}
123 137 id_, params = build_data(
124 138 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
125 139 response = api_call(self.app, params)
126 140 try:
127 141 expected = 'repository `%s` does not exist' % (repo_name,)
128 142 assert_error(id_, expected, given=response.body)
129 143 finally:
130 144 fixture.destroy_repo(repo_name)
131 145
132 146 @mock.patch.object(RepoModel, 'update', crash)
133 147 def test_api_update_repo_exception_occurred(self, backend):
134 repo_name = 'api_update_me'
148 repo_name = UPDATE_REPO_NAME
135 149 fixture.create_repo(repo_name, repo_type=backend.alias)
136 150 id_, params = build_data(
137 151 self.apikey, 'update_repo', repoid=repo_name,
138 152 owner=TEST_USER_ADMIN_LOGIN,)
139 153 response = api_call(self.app, params)
140 154 try:
141 155 expected = 'failed to update repo `%s`' % (repo_name,)
142 156 assert_error(id_, expected, given=response.body)
143 157 finally:
144 158 fixture.destroy_repo(repo_name)
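
The parametrized test above replaces per-attribute branching with a data-driven expectation: the SAME_AS_UPDATES sentinel means "the response should echo the update payload verbatim", while an explicit dict covers fields the API normalises (for example clone_uri=None coming back as an empty string, or landing_rev being returned as a two-element list). A small sketch of that resolution step on its own, using illustrative data:

.. code-block:: python

    class SAME_AS_UPDATES(object):
        """Sentinel: expect the API response to mirror the update payload."""

    def resolve_expected(current_api_data, updates, expected):
        # Start from the repo's current API data, then overlay either the
        # payload itself or the explicitly listed normalised values.
        result = dict(current_api_data)
        result.update(updates if expected is SAME_AS_UPDATES else expected)
        return result

    current = {'repo_name': 'api_update_me', 'clone_uri': 'http://old/uri'}
    # Plain attribute change: the response mirrors the payload.
    assert resolve_expected(
        current, {'description': 'new'}, SAME_AS_UPDATES)['description'] == 'new'
    # Normalised attribute: clone_uri=None is stored and returned as ''.
    assert resolve_expected(
        current, {'clone_uri': None}, {'clone_uri': ''})['clone_uri'] == ''
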
@@ -1,100 +1,108 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.model.user import UserModel
25 25 from rhodecode.model.user_group import UserGroupModel
26 26 from rhodecode.tests import TEST_USER_REGULAR_LOGIN
27 27 from rhodecode.api.tests.utils import (
28 build_data, api_call, assert_error, assert_ok, crash)
28 build_data, api_call, assert_error, assert_ok, crash, jsonify)
29 29
30 30
31 31 @pytest.mark.usefixtures("testuser_api", "app")
32 32 class TestUpdateUserGroup(object):
33 33 @pytest.mark.parametrize("changing_attr, updates", [
34 34 ('group_name', {'group_name': 'new_group_name'}),
35 35 ('group_name', {'group_name': 'test_group_for_update'}),
36 36 ('owner', {'owner': TEST_USER_REGULAR_LOGIN}),
37 37 ('active', {'active': False}),
38 38 ('active', {'active': True})
39 39 ])
40 40 def test_api_update_user_group(self, changing_attr, updates, user_util):
41 41 user_group = user_util.create_user_group()
42 42 group_name = user_group.users_group_name
43 expected_api_data = user_group.get_api_data()
44 expected_api_data.update(updates)
45
43 46 id_, params = build_data(
44 47 self.apikey, 'update_user_group', usergroupid=group_name,
45 48 **updates)
46 49 response = api_call(self.app, params)
50
47 51 expected = {
48 52 'msg': 'updated user group ID:%s %s' % (
49 53 user_group.users_group_id, user_group.users_group_name),
50 'user_group': user_group.get_api_data()
54 'user_group': jsonify(expected_api_data)
51 55 }
52 56 assert_ok(id_, expected, given=response.body)
53 57
54 58 @pytest.mark.parametrize("changing_attr, updates", [
55 59 # TODO: mikhail: decide if we need to test against the commented params
56 60 # ('group_name', {'group_name': 'new_group_name'}),
57 61 # ('group_name', {'group_name': 'test_group_for_update'}),
58 62 ('owner', {'owner': TEST_USER_REGULAR_LOGIN}),
59 63 ('active', {'active': False}),
60 64 ('active', {'active': True})
61 65 ])
62 66 def test_api_update_user_group_regular_user(
63 67 self, changing_attr, updates, user_util):
64 68 user_group = user_util.create_user_group()
65 69 group_name = user_group.users_group_name
70 expected_api_data = user_group.get_api_data()
71 expected_api_data.update(updates)
72
73
66 74 # grant permission to this user
67 75 user = UserModel().get_by_username(self.TEST_USER_LOGIN)
68 76
69 77 user_util.grant_user_permission_to_user_group(
70 78 user_group, user, 'usergroup.admin')
71 79 id_, params = build_data(
72 80 self.apikey_regular, 'update_user_group',
73 81 usergroupid=group_name, **updates)
74 82 response = api_call(self.app, params)
75 83 expected = {
76 84 'msg': 'updated user group ID:%s %s' % (
77 85 user_group.users_group_id, user_group.users_group_name),
78 'user_group': user_group.get_api_data()
86 'user_group': jsonify(expected_api_data)
79 87 }
80 88 assert_ok(id_, expected, given=response.body)
81 89
82 90 def test_api_update_user_group_regular_user_no_permission(self, user_util):
83 91 user_group = user_util.create_user_group()
84 92 group_name = user_group.users_group_name
85 93 id_, params = build_data(
86 94 self.apikey_regular, 'update_user_group', usergroupid=group_name)
87 95 response = api_call(self.app, params)
88 96
89 97 expected = 'user group `%s` does not exist' % (group_name)
90 98 assert_error(id_, expected, given=response.body)
91 99
92 100 @mock.patch.object(UserGroupModel, 'update', crash)
93 101 def test_api_update_user_group_exception_occurred(self, user_util):
94 102 user_group = user_util.create_user_group()
95 103 group_name = user_group.users_group_name
96 104 id_, params = build_data(
97 105 self.apikey, 'update_user_group', usergroupid=group_name)
98 106 response = api_call(self.app, params)
99 107 expected = 'failed to update user group `%s`' % (group_name,)
100 108 assert_error(id_, expected, given=response.body)
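
Both updated test modules now compare against jsonify(expected_api_data) rather than the raw dict, so values that change shape during JSON serialisation (datetimes in particular) are compared in their serialised form, matching what the decoded JSON-RPC response actually contains. A hedged sketch of that round trip; the jsonify below is a stand-in for the test helper, not the project's implementation:

.. code-block:: python

    import datetime
    import json

    def jsonify(obj):
        # Stand-in: serialise and re-parse so the expected data takes the
        # same shape as a decoded API response.
        def default(value):
            if isinstance(value, (datetime.date, datetime.datetime)):
                return value.isoformat()
            return str(value)
        return json.loads(json.dumps(obj, default=default))

    expected = {
        'group_name': 'new_group_name',
        'created_on': datetime.datetime(2016, 6, 1, 12, 0),
    }
    # After jsonify the datetime is a plain string, just like in the
    # decoded response body.
    assert jsonify(expected)['created_on'] == '2016-06-01T12:00:00'
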
@@ -1,1774 +1,1777 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import time
23 23
24 24 import colander
25 25
26 26 from rhodecode import BACKENDS
27 27 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCForbidden, json
28 28 from rhodecode.api.utils import (
29 29 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
30 30 get_user_group_or_error, get_user_or_error, has_repo_permissions,
31 31 get_perm_or_error, store_update, get_repo_group_or_error, parse_args,
32 32 get_origin, build_commit_data)
33 33 from rhodecode.lib.auth import (
34 34 HasPermissionAnyApi, HasRepoGroupPermissionAnyApi,
35 35 HasUserGroupPermissionAnyApi)
36 36 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
37 37 from rhodecode.lib.utils import map_groups
38 38 from rhodecode.lib.utils2 import str2bool, time_to_datetime
39 39 from rhodecode.model.changeset_status import ChangesetStatusModel
40 40 from rhodecode.model.comment import ChangesetCommentsModel
41 41 from rhodecode.model.db import (
42 42 Session, ChangesetStatus, RepositoryField, Repository)
43 43 from rhodecode.model.repo import RepoModel
44 44 from rhodecode.model.repo_group import RepoGroupModel
45 45 from rhodecode.model.scm import ScmModel, RepoList
46 46 from rhodecode.model.settings import SettingsModel
47 47 from rhodecode.model.validation_schema import RepoSchema
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 @jsonrpc_method()
53 53 def get_repo(request, apiuser, repoid, cache=Optional(True)):
54 54 """
55 55 Gets an existing repository by its name or repository_id.
56 56
57 57 The members section of the output returns the user groups and users
58 58 associated with that repository.
59 59
60 60 This command can only be run using an |authtoken| with admin rights,
61 61 or users with at least read rights to the |repo|.
62 62
63 63 :param apiuser: This is filled automatically from the |authtoken|.
64 64 :type apiuser: AuthUser
65 65 :param repoid: The repository name or repository id.
66 66 :type repoid: str or int
67 67 :param cache: use the cached value for last changeset
68 68 :type: cache: Optional(bool)
69 69
70 70 Example output:
71 71
72 72 .. code-block:: bash
73 73
74 74 {
75 75 "error": null,
76 76 "id": <repo_id>,
77 77 "result": {
78 78 "clone_uri": null,
79 79 "created_on": "timestamp",
80 80 "description": "repo description",
81 81 "enable_downloads": false,
82 82 "enable_locking": false,
83 83 "enable_statistics": false,
84 84 "followers": [
85 85 {
86 86 "active": true,
87 87 "admin": false,
88 88 "api_key": "****************************************",
89 89 "api_keys": [
90 90 "****************************************"
91 91 ],
92 92 "email": "user@example.com",
93 93 "emails": [
94 94 "user@example.com"
95 95 ],
96 96 "extern_name": "rhodecode",
97 97 "extern_type": "rhodecode",
98 98 "firstname": "username",
99 99 "ip_addresses": [],
100 100 "language": null,
101 101 "last_login": "2015-09-16T17:16:35.854",
102 102 "lastname": "surname",
103 103 "user_id": <user_id>,
104 104 "username": "name"
105 105 }
106 106 ],
107 107 "fork_of": "parent-repo",
108 108 "landing_rev": [
109 109 "rev",
110 110 "tip"
111 111 ],
112 112 "last_changeset": {
113 113 "author": "User <user@example.com>",
114 114 "branch": "default",
115 115 "date": "timestamp",
116 116 "message": "last commit message",
117 117 "parents": [
118 118 {
119 119 "raw_id": "commit-id"
120 120 }
121 121 ],
122 122 "raw_id": "commit-id",
123 123 "revision": <revision number>,
124 124 "short_id": "short id"
125 125 },
126 126 "lock_reason": null,
127 127 "locked_by": null,
128 128 "locked_date": null,
129 129 "members": [
130 130 {
131 131 "name": "super-admin-name",
132 132 "origin": "super-admin",
133 133 "permission": "repository.admin",
134 134 "type": "user"
135 135 },
136 136 {
137 137 "name": "owner-name",
138 138 "origin": "owner",
139 139 "permission": "repository.admin",
140 140 "type": "user"
141 141 },
142 142 {
143 143 "name": "user-group-name",
144 144 "origin": "permission",
145 145 "permission": "repository.write",
146 146 "type": "user_group"
147 147 }
148 148 ],
149 149 "owner": "owner-name",
150 150 "permissions": [
151 151 {
152 152 "name": "super-admin-name",
153 153 "origin": "super-admin",
154 154 "permission": "repository.admin",
155 155 "type": "user"
156 156 },
157 157 {
158 158 "name": "owner-name",
159 159 "origin": "owner",
160 160 "permission": "repository.admin",
161 161 "type": "user"
162 162 },
163 163 {
164 164 "name": "user-group-name",
165 165 "origin": "permission",
166 166 "permission": "repository.write",
167 167 "type": "user_group"
168 168 }
169 169 ],
170 170 "private": true,
171 171 "repo_id": 676,
172 172 "repo_name": "user-group/repo-name",
173 173 "repo_type": "hg"
174 174 }
175 175 }
176 176 """
177 177
178 178 repo = get_repo_or_error(repoid)
179 179 cache = Optional.extract(cache)
180 180 include_secrets = False
181 181 if has_superadmin_permission(apiuser):
182 182 include_secrets = True
183 183 else:
184 184 # check if we have at least read permission for this repo !
185 185 _perms = (
186 186 'repository.admin', 'repository.write', 'repository.read',)
187 187 has_repo_permissions(apiuser, repoid, repo, _perms)
188 188
189 189 permissions = []
190 190 for _user in repo.permissions():
191 191 user_data = {
192 192 'name': _user.username,
193 193 'permission': _user.permission,
194 194 'origin': get_origin(_user),
195 195 'type': "user",
196 196 }
197 197 permissions.append(user_data)
198 198
199 199 for _user_group in repo.permission_user_groups():
200 200 user_group_data = {
201 201 'name': _user_group.users_group_name,
202 202 'permission': _user_group.permission,
203 203 'origin': get_origin(_user_group),
204 204 'type': "user_group",
205 205 }
206 206 permissions.append(user_group_data)
207 207
208 208 following_users = [
209 209 user.user.get_api_data(include_secrets=include_secrets)
210 210 for user in repo.followers]
211 211
212 212 if not cache:
213 213 repo.update_commit_cache()
214 214 data = repo.get_api_data(include_secrets=include_secrets)
215 215 data['members'] = permissions # TODO: this should be deprecated soon
216 216 data['permissions'] = permissions
217 217 data['followers'] = following_users
218 218 return data
219 219
220 220
221 221 @jsonrpc_method()
222 222 def get_repos(request, apiuser):
223 223 """
224 224 Lists all existing repositories.
225 225
226 226 This command can only be run using an |authtoken| with admin rights,
227 227 or users with at least read rights to |repos|.
228 228
229 229 :param apiuser: This is filled automatically from the |authtoken|.
230 230 :type apiuser: AuthUser
231 231
232 232 Example output:
233 233
234 234 .. code-block:: bash
235 235
236 236 id : <id_given_in_input>
237 237 result: [
238 238 {
239 239 "repo_id" : "<repo_id>",
240 240 "repo_name" : "<reponame>"
241 241 "repo_type" : "<repo_type>",
242 242 "clone_uri" : "<clone_uri>",
243 243 "private": : "<bool>",
244 244 "created_on" : "<datetimecreated>",
245 245 "description" : "<description>",
246 246 "landing_rev": "<landing_rev>",
247 247 "owner": "<repo_owner>",
248 248 "fork_of": "<name_of_fork_parent>",
249 249 "enable_downloads": "<bool>",
250 250 "enable_locking": "<bool>",
251 251 "enable_statistics": "<bool>",
252 252 },
253 253 ...
254 254 ]
255 255 error: null
256 256 """
257 257
258 258 include_secrets = has_superadmin_permission(apiuser)
259 259 _perms = ('repository.read', 'repository.write', 'repository.admin',)
260 260 extras = {'user': apiuser}
261 261
262 262 repo_list = RepoList(
263 263 RepoModel().get_all(), perm_set=_perms, extra_kwargs=extras)
264 264 return [repo.get_api_data(include_secrets=include_secrets)
265 265 for repo in repo_list]
266 266
267 267
268 268 @jsonrpc_method()
269 269 def get_repo_changeset(request, apiuser, repoid, revision,
270 270 details=Optional('basic')):
271 271 """
272 272 Returns information about a changeset.
273 273
274 274 Additionally parameters define the amount of details returned by
275 275 this function.
276 276
277 277 This command can only be run using an |authtoken| with admin rights,
278 278 or users with at least read rights to the |repo|.
279 279
280 280 :param apiuser: This is filled automatically from the |authtoken|.
281 281 :type apiuser: AuthUser
282 282 :param repoid: The repository name or repository id
283 283 :type repoid: str or int
284 284 :param revision: revision for which listing should be done
285 285 :type revision: str
286 286 :param details: details can be 'basic|extended|full'; 'full' gives diff
287 287 details such as the diff itself and the number of changed files.
288 288 :type details: Optional(str)
289 289
290 290 """
291 291 repo = get_repo_or_error(repoid)
292 292 if not has_superadmin_permission(apiuser):
293 293 _perms = (
294 294 'repository.admin', 'repository.write', 'repository.read',)
295 295 has_repo_permissions(apiuser, repoid, repo, _perms)
296 296
297 297 changes_details = Optional.extract(details)
298 298 _changes_details_types = ['basic', 'extended', 'full']
299 299 if changes_details not in _changes_details_types:
300 300 raise JSONRPCError(
301 301 'ret_type must be one of %s' % (
302 302 ','.join(_changes_details_types)))
303 303
304 304 pre_load = ['author', 'branch', 'date', 'message', 'parents',
305 305 'status', '_commit', '_file_paths']
306 306
307 307 try:
308 308 cs = repo.get_commit(commit_id=revision, pre_load=pre_load)
309 309 except TypeError as e:
310 310 raise JSONRPCError(e.message)
311 311 _cs_json = cs.__json__()
312 312 _cs_json['diff'] = build_commit_data(cs, changes_details)
313 313 if changes_details == 'full':
314 314 _cs_json['refs'] = {
315 315 'branches': [cs.branch],
316 316 'bookmarks': getattr(cs, 'bookmarks', []),
317 317 'tags': cs.tags
318 318 }
319 319 return _cs_json
320 320
321 321
322 322 @jsonrpc_method()
323 323 def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
324 324 details=Optional('basic')):
325 325 """
326 Returns a set of changesets limited by the number of commits starting
326 Returns a set of commits limited by the number starting
327 327 from the `start_rev` option.
328 328
329 329 Additional parameters define the amount of details returned by this
330 330 function.
331 331
332 332 This command can only be run using an |authtoken| with admin rights,
333 333 or users with at least read rights to |repos|.
334 334
335 335 :param apiuser: This is filled automatically from the |authtoken|.
336 336 :type apiuser: AuthUser
337 337 :param repoid: The repository name or repository ID.
338 338 :type repoid: str or int
339 339 :param start_rev: The starting revision from where to get changesets.
340 340 :type start_rev: str
341 :param limit: Limit the number of changesets to this amount
341 :param limit: Limit the number of commits to this amount
342 342 :type limit: str or int
343 343 :param details: Set the level of detail returned. Valid option are:
344 344 ``basic``, ``extended`` and ``full``.
345 345 :type details: Optional(str)
346 346
347 347 .. note::
348 348
349 349 Setting the parameter `details` to the value ``full`` is extensive
350 350 and returns details like the diff itself, and the number
351 351 of changed files.
352 352
353 353 """
354 354 repo = get_repo_or_error(repoid)
355 355 if not has_superadmin_permission(apiuser):
356 356 _perms = (
357 357 'repository.admin', 'repository.write', 'repository.read',)
358 358 has_repo_permissions(apiuser, repoid, repo, _perms)
359 359
360 360 changes_details = Optional.extract(details)
361 361 _changes_details_types = ['basic', 'extended', 'full']
362 362 if changes_details not in _changes_details_types:
363 363 raise JSONRPCError(
364 364 'ret_type must be one of %s' % (
365 365 ','.join(_changes_details_types)))
366 366
367 367 limit = int(limit)
368 368 pre_load = ['author', 'branch', 'date', 'message', 'parents',
369 369 'status', '_commit', '_file_paths']
370 370
371 371 vcs_repo = repo.scm_instance()
372 372 # SVN needs a special case to distinguish its index and commit id
373 if vcs_repo.alias == 'svn' and (start_rev == '0'):
373 if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
374 374 start_rev = vcs_repo.commit_ids[0]
375 375
376 376 try:
377 commits = repo.scm_instance().get_commits(
377 commits = vcs_repo.get_commits(
378 378 start_id=start_rev, pre_load=pre_load)
379 379 except TypeError as e:
380 380 raise JSONRPCError(e.message)
381 except Exception:
382 log.exception('Fetching of commits failed')
383 raise JSONRPCError('Error occurred during commit fetching')
381 384
382 385 ret = []
383 386 for cnt, commit in enumerate(commits):
384 387 if cnt >= limit != -1:
385 388 break
386 389 _cs_json = commit.__json__()
387 390 _cs_json['diff'] = build_commit_data(commit, changes_details)
388 391 if changes_details == 'full':
389 392 _cs_json['refs'] = {
390 393 'branches': [commit.branch],
391 394 'bookmarks': getattr(commit, 'bookmarks', []),
392 395 'tags': commit.tags
393 396 }
394 397 ret.append(_cs_json)
395 398 return ret
396 399
397 400
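
The docstring above spells out the parameters of get_repo_changesets; a hedged example of a matching JSON-RPC request is sketched below. The server URL, auth token and repository name are placeholders, and the request envelope follows the id/api_key/args layout shown in the fork_repo example block in this module, plus the method name:

.. code-block:: python

    import requests  # any HTTP client works; requests keeps the sketch short

    API_URL = 'https://rhodecode.example.com/_admin/api'  # placeholder URL
    payload = {
        'id': 1,
        'api_key': '<auth token>',                        # placeholder token
        'method': 'get_repo_changesets',
        'args': {
            'repoid': 'my-repo',
            'start_rev': 'tip',
            'limit': 5,
            'details': 'basic',  # 'extended'/'full' return progressively more
        },
    }
    response = requests.post(API_URL, json=payload).json()
    for commit in response['result']:
        print('%s %s' % (commit.get('short_id'), commit.get('message')))
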
398 401 @jsonrpc_method()
399 402 def get_repo_nodes(request, apiuser, repoid, revision, root_path,
400 403 ret_type=Optional('all'), details=Optional('basic')):
401 404 """
402 405 Returns a list of nodes and children in a flat list for a given
403 406 path at given revision.
404 407
405 408 It's possible to specify ret_type to show only `files` or `dirs`.
406 409
407 410 This command can only be run using an |authtoken| with admin rights,
408 411 or users with at least read rights to |repos|.
409 412
410 413 :param apiuser: This is filled automatically from the |authtoken|.
411 414 :type apiuser: AuthUser
412 415 :param repoid: The repository name or repository ID.
413 416 :type repoid: str or int
414 417 :param revision: The revision for which listing should be done.
415 418 :type revision: str
416 419 :param root_path: The path from which to start displaying.
417 420 :type root_path: str
418 421 :param ret_type: Set the return type. Valid options are
419 422 ``all`` (default), ``files`` and ``dirs``.
420 423 :type ret_type: Optional(str)
421 424 :param details: Returns extended information about nodes, such as
422 425 md5, binary, and or content. The valid options are ``basic`` and
423 426 ``full``.
424 427 :type details: Optional(str)
425 428
426 429 Example output:
427 430
428 431 .. code-block:: bash
429 432
430 433 id : <id_given_in_input>
431 434 result: [
432 435 {
433 436 "name" : "<name>"
434 437 "type" : "<type>",
435 438 "binary": "<true|false>" (only in extended mode)
436 439 "md5" : "<md5 of file content>" (only in extended mode)
437 440 },
438 441 ...
439 442 ]
440 443 error: null
441 444 """
442 445
443 446 repo = get_repo_or_error(repoid)
444 447 if not has_superadmin_permission(apiuser):
445 448 _perms = (
446 449 'repository.admin', 'repository.write', 'repository.read',)
447 450 has_repo_permissions(apiuser, repoid, repo, _perms)
448 451
449 452 ret_type = Optional.extract(ret_type)
450 453 details = Optional.extract(details)
451 454 _extended_types = ['basic', 'full']
452 455 if details not in _extended_types:
453 456 raise JSONRPCError(
454 457 'ret_type must be one of %s' % (','.join(_extended_types)))
455 458 extended_info = False
456 459 content = False
457 460 if details == 'basic':
458 461 extended_info = True
459 462
460 463 if details == 'full':
461 464 extended_info = content = True
462 465
463 466 _map = {}
464 467 try:
465 468 # check if repo is not empty by any chance, skip quicker if it is.
466 469 _scm = repo.scm_instance()
467 470 if _scm.is_empty():
468 471 return []
469 472
470 473 _d, _f = ScmModel().get_nodes(
471 474 repo, revision, root_path, flat=False,
472 475 extended_info=extended_info, content=content)
473 476 _map = {
474 477 'all': _d + _f,
475 478 'files': _f,
476 479 'dirs': _d,
477 480 }
478 481 return _map[ret_type]
479 482 except KeyError:
480 483 raise JSONRPCError(
481 484 'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
482 485 except Exception:
483 486 log.exception("Exception occurred while trying to get repo nodes")
484 487 raise JSONRPCError(
485 488 'failed to get repo: `%s` nodes' % repo.repo_name
486 489 )
487 490
488 491
489 492 @jsonrpc_method()
490 493 def get_repo_refs(request, apiuser, repoid):
491 494 """
492 495 Returns a dictionary of current references. It returns
493 496 bookmarks, branches, closed_branches, and tags for given repository
494 497
495 498 It's possible to specify ret_type to show only `files` or `dirs`.
496 499
497 500 This command can only be run using an |authtoken| with admin rights,
498 501 or users with at least read rights to |repos|.
499 502
500 503 :param apiuser: This is filled automatically from the |authtoken|.
501 504 :type apiuser: AuthUser
502 505 :param repoid: The repository name or repository ID.
503 506 :type repoid: str or int
504 507
505 508 Example output:
506 509
507 510 .. code-block:: bash
508 511
509 512 id : <id_given_in_input>
510 513 result: [
511 514 TODO...
512 515 ]
513 516 error: null
514 517 """
515 518
516 519 repo = get_repo_or_error(repoid)
517 520 if not has_superadmin_permission(apiuser):
518 521 _perms = ('repository.admin', 'repository.write', 'repository.read',)
519 522 has_repo_permissions(apiuser, repoid, repo, _perms)
520 523
521 524 try:
522 525 # check if repo is not empty by any chance, skip quicker if it is.
523 526 vcs_instance = repo.scm_instance()
524 527 refs = vcs_instance.refs()
525 528 return refs
526 529 except Exception:
527 530 log.exception("Exception occurred while trying to get repo refs")
528 531 raise JSONRPCError(
529 532 'failed to get repo: `%s` references' % repo.repo_name
530 533 )
531 534
532 535
533 536 @jsonrpc_method()
534 537 def create_repo(request, apiuser, repo_name, repo_type,
535 538 owner=Optional(OAttr('apiuser')), description=Optional(''),
536 539 private=Optional(False), clone_uri=Optional(None),
537 540 landing_rev=Optional('rev:tip'),
538 541 enable_statistics=Optional(False),
539 542 enable_locking=Optional(False),
540 543 enable_downloads=Optional(False),
541 544 copy_permissions=Optional(False)):
542 545 """
543 546 Creates a repository.
544 547
545 548 * If the repository name contains "/", all the required repository
546 549 groups will be created.
547 550
548 551 For example "foo/bar/baz" will create |repo| groups "foo" and "bar"
549 552 (with "foo" as parent). It will also create the "baz" repository
550 553 with "bar" as |repo| group.
551 554
552 555 This command can only be run using an |authtoken| with at least
553 556 write permissions to the |repo|.
554 557
555 558 :param apiuser: This is filled automatically from the |authtoken|.
556 559 :type apiuser: AuthUser
557 560 :param repo_name: Set the repository name.
558 561 :type repo_name: str
559 562 :param repo_type: Set the repository type; 'hg','git', or 'svn'.
560 563 :type repo_type: str
561 564 :param owner: user_id or username
562 565 :type owner: Optional(str)
563 566 :param description: Set the repository description.
564 567 :type description: Optional(str)
565 568 :param private:
566 569 :type private: bool
567 570 :param clone_uri:
568 571 :type clone_uri: str
569 572 :param landing_rev: <rev_type>:<rev>
570 573 :type landing_rev: str
571 574 :param enable_locking:
572 575 :type enable_locking: bool
573 576 :param enable_downloads:
574 577 :type enable_downloads: bool
575 578 :param enable_statistics:
576 579 :type enable_statistics: bool
577 580 :param copy_permissions: Copy permission from group in which the
578 581 repository is being created.
579 582 :type copy_permissions: bool
580 583
581 584
582 585 Example output:
583 586
584 587 .. code-block:: bash
585 588
586 589 id : <id_given_in_input>
587 590 result: {
588 591 "msg": "Created new repository `<reponame>`",
589 592 "success": true,
590 593 "task": "<celery task id or None if done sync>"
591 594 }
592 595 error: null
593 596
594 597
595 598 Example error output:
596 599
597 600 .. code-block:: bash
598 601
599 602 id : <id_given_in_input>
600 603 result : null
601 604 error : {
602 605 'failed to create repository `<repo_name>`
603 606 }
604 607
605 608 """
606 609 schema = RepoSchema()
607 610 try:
608 611 data = schema.deserialize({
609 612 'repo_name': repo_name
610 613 })
611 614 except colander.Invalid as e:
612 615 raise JSONRPCError("Validation failed: %s" % (e.asdict(),))
613 616 repo_name = data['repo_name']
614 617
615 618 (repo_name_cleaned,
616 619 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(
617 620 repo_name)
618 621
619 622 if not HasPermissionAnyApi(
620 623 'hg.admin', 'hg.create.repository')(user=apiuser):
621 624 # check if we have admin permission for this repo group if given !
622 625
623 626 if parent_group_name:
624 627 repogroupid = parent_group_name
625 628 repo_group = get_repo_group_or_error(parent_group_name)
626 629
627 630 _perms = ('group.admin',)
628 631 if not HasRepoGroupPermissionAnyApi(*_perms)(
629 632 user=apiuser, group_name=repo_group.group_name):
630 633 raise JSONRPCError(
631 634 'repository group `%s` does not exist' % (
632 635 repogroupid,))
633 636 else:
634 637 raise JSONRPCForbidden()
635 638
636 639 if not has_superadmin_permission(apiuser):
637 640 if not isinstance(owner, Optional):
638 641 # forbid setting owner for non-admins
639 642 raise JSONRPCError(
640 643 'Only RhodeCode admin can specify `owner` param')
641 644
642 645 if isinstance(owner, Optional):
643 646 owner = apiuser.user_id
644 647
645 648 owner = get_user_or_error(owner)
646 649
647 650 if RepoModel().get_by_repo_name(repo_name):
648 651 raise JSONRPCError("repo `%s` already exist" % repo_name)
649 652
650 653 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
651 654 if isinstance(private, Optional):
652 655 private = defs.get('repo_private') or Optional.extract(private)
653 656 if isinstance(repo_type, Optional):
654 657 repo_type = defs.get('repo_type')
655 658 if isinstance(enable_statistics, Optional):
656 659 enable_statistics = defs.get('repo_enable_statistics')
657 660 if isinstance(enable_locking, Optional):
658 661 enable_locking = defs.get('repo_enable_locking')
659 662 if isinstance(enable_downloads, Optional):
660 663 enable_downloads = defs.get('repo_enable_downloads')
661 664
662 665 clone_uri = Optional.extract(clone_uri)
663 666 description = Optional.extract(description)
664 667 landing_rev = Optional.extract(landing_rev)
665 668 copy_permissions = Optional.extract(copy_permissions)
666 669
667 670 try:
668 671 # create structure of groups and return the last group
669 672 repo_group = map_groups(repo_name)
670 673 data = {
671 674 'repo_name': repo_name_cleaned,
672 675 'repo_name_full': repo_name,
673 676 'repo_type': repo_type,
674 677 'repo_description': description,
675 678 'owner': owner,
676 679 'repo_private': private,
677 680 'clone_uri': clone_uri,
678 681 'repo_group': repo_group.group_id if repo_group else None,
679 682 'repo_landing_rev': landing_rev,
680 683 'enable_statistics': enable_statistics,
681 684 'enable_locking': enable_locking,
682 685 'enable_downloads': enable_downloads,
683 686 'repo_copy_permissions': copy_permissions,
684 687 }
685 688
686 689 if repo_type not in BACKENDS.keys():
687 690 raise Exception("Invalid backend type %s" % repo_type)
688 691 task = RepoModel().create(form_data=data, cur_user=owner)
689 692 from celery.result import BaseAsyncResult
690 693 task_id = None
691 694 if isinstance(task, BaseAsyncResult):
692 695 task_id = task.task_id
693 696 # no commit, it's done in RepoModel, or async via celery
694 697 return {
695 698 'msg': "Created new repository `%s`" % (repo_name,),
696 699 'success': True, # cannot return the repo data here since fork
697 700 # cann be done async
698 701 'task': task_id
699 702 }
700 703 except Exception:
701 704 log.exception(
702 705 u"Exception while trying to create the repository %s",
703 706 repo_name)
704 707 raise JSONRPCError(
705 708 'failed to create repository `%s`' % (repo_name,))
706 709
707 710
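
create_repo above creates any missing repository groups implied by a slash-separated name before creating the repository itself. The naming rule is simple enough to show directly; this is an illustration of the rule from the docstring, not the map_groups implementation:

.. code-block:: python

    repo_name = 'foo/bar/baz'
    parts = repo_name.split('/')
    # Every prefix except the last segment becomes (or reuses) a repo group.
    groups = ['/'.join(parts[:i + 1]) for i in range(len(parts) - 1)]
    just_name = parts[-1]

    assert groups == ['foo', 'foo/bar']  # 'bar' is created with 'foo' as parent
    assert just_name == 'baz'            # the repository lands in 'foo/bar'
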
708 711 @jsonrpc_method()
709 712 def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
710 713 description=Optional('')):
711 714 """
712 715 Adds an extra field to a repository.
713 716
714 717 This command can only be run using an |authtoken| with at least
715 718 write permissions to the |repo|.
716 719
717 720 :param apiuser: This is filled automatically from the |authtoken|.
718 721 :type apiuser: AuthUser
719 722 :param repoid: Set the repository name or repository id.
720 723 :type repoid: str or int
721 724 :param key: Create a unique field key for this repository.
722 725 :type key: str
723 726 :param label:
724 727 :type label: Optional(str)
725 728 :param description:
726 729 :type description: Optional(str)
727 730 """
728 731 repo = get_repo_or_error(repoid)
729 732 if not has_superadmin_permission(apiuser):
730 733 _perms = ('repository.admin',)
731 734 has_repo_permissions(apiuser, repoid, repo, _perms)
732 735
733 736 label = Optional.extract(label) or key
734 737 description = Optional.extract(description)
735 738
736 739 field = RepositoryField.get_by_key_name(key, repo)
737 740 if field:
738 741 raise JSONRPCError('Field with key '
739 742 '`%s` exists for repo `%s`' % (key, repoid))
740 743
741 744 try:
742 745 RepoModel().add_repo_field(repo, key, field_label=label,
743 746 field_desc=description)
744 747 Session().commit()
745 748 return {
746 749 'msg': "Added new repository field `%s`" % (key,),
747 750 'success': True,
748 751 }
749 752 except Exception:
750 753 log.exception("Exception occurred while trying to add field to repo")
751 754 raise JSONRPCError(
752 755 'failed to create new field for repository `%s`' % (repoid,))
753 756
754 757
755 758 @jsonrpc_method()
756 759 def remove_field_from_repo(request, apiuser, repoid, key):
757 760 """
758 761 Removes an extra field from a repository.
759 762
760 763 This command can only be run using an |authtoken| with at least
761 764 write permissions to the |repo|.
762 765
763 766 :param apiuser: This is filled automatically from the |authtoken|.
764 767 :type apiuser: AuthUser
765 768 :param repoid: Set the repository name or repository ID.
766 769 :type repoid: str or int
767 770 :param key: Set the unique field key for this repository.
768 771 :type key: str
769 772 """
770 773
771 774 repo = get_repo_or_error(repoid)
772 775 if not has_superadmin_permission(apiuser):
773 776 _perms = ('repository.admin',)
774 777 has_repo_permissions(apiuser, repoid, repo, _perms)
775 778
776 779 field = RepositoryField.get_by_key_name(key, repo)
777 780 if not field:
778 781 raise JSONRPCError('Field with key `%s` does not '
779 782 'exists for repo `%s`' % (key, repoid))
780 783
781 784 try:
782 785 RepoModel().delete_repo_field(repo, field_key=key)
783 786 Session().commit()
784 787 return {
785 788 'msg': "Deleted repository field `%s`" % (key,),
786 789 'success': True,
787 790 }
788 791 except Exception:
789 792 log.exception(
790 793 "Exception occurred while trying to delete field from repo")
791 794 raise JSONRPCError(
792 795 'failed to delete field for repository `%s`' % (repoid,))
793 796
794 797
795 798 @jsonrpc_method()
796 799 def update_repo(request, apiuser, repoid, name=Optional(None),
797 800 owner=Optional(OAttr('apiuser')),
798 801 group=Optional(None),
799 802 fork_of=Optional(None),
800 803 description=Optional(''), private=Optional(False),
801 804 clone_uri=Optional(None), landing_rev=Optional('rev:tip'),
802 805 enable_statistics=Optional(False),
803 806 enable_locking=Optional(False),
804 807 enable_downloads=Optional(False),
805 808 fields=Optional('')):
806 809 """
807 810 Updates a repository with the given information.
808 811
809 812 This command can only be run using an |authtoken| with at least
810 813 write permissions to the |repo|.
811 814
812 815 :param apiuser: This is filled automatically from the |authtoken|.
813 816 :type apiuser: AuthUser
814 817 :param repoid: repository name or repository ID.
815 818 :type repoid: str or int
816 819 :param name: Update the |repo| name.
817 820 :type name: str
818 821 :param owner: Set the |repo| owner.
819 822 :type owner: str
820 823 :param group: Set the |repo| group the |repo| belongs to.
821 824 :type group: str
822 825 :param fork_of: Set the master |repo| name.
823 826 :type fork_of: str
824 827 :param description: Update the |repo| description.
825 828 :type description: str
826 829 :param private: Set the |repo| as private. (True | False)
827 830 :type private: bool
828 831 :param clone_uri: Update the |repo| clone URI.
829 832 :type clone_uri: str
830 833 :param landing_rev: Set the |repo| landing revision. Default is
831 834 ``tip``.
832 835 :type landing_rev: str
833 836 :param enable_statistics: Enable statistics on the |repo|,
834 837 (True | False).
835 838 :type enable_statistics: bool
836 839 :param enable_locking: Enable |repo| locking.
837 840 :type enable_locking: bool
838 841 :param enable_downloads: Enable downloads from the |repo|,
839 842 (True | False).
840 843 :type enable_downloads: bool
841 844 :param fields: Add extra fields to the |repo|. Use the following
842 845 example format: ``field_key=field_val,field_key2=fieldval2``.
843 846 Escape ', ' with \,
844 847 :type fields: str
845 848 """
846 849 repo = get_repo_or_error(repoid)
847 850 include_secrets = False
848 851 if has_superadmin_permission(apiuser):
849 852 include_secrets = True
850 853 else:
851 854 _perms = ('repository.admin',)
852 855 has_repo_permissions(apiuser, repoid, repo, _perms)
853 856
854 857 updates = {
855 858 # update function requires this.
856 859 'repo_name': repo.just_name
857 860 }
858 861 repo_group = group
859 862 if not isinstance(repo_group, Optional):
860 863 repo_group = get_repo_group_or_error(repo_group)
861 864 repo_group = repo_group.group_id
862 865
863 866 repo_fork_of = fork_of
864 867 if not isinstance(repo_fork_of, Optional):
865 868 repo_fork_of = get_repo_or_error(repo_fork_of)
866 869 repo_fork_of = repo_fork_of.repo_id
867 870
868 871 try:
869 872 store_update(updates, name, 'repo_name')
870 873 store_update(updates, repo_group, 'repo_group')
871 874 store_update(updates, repo_fork_of, 'fork_id')
872 875 store_update(updates, owner, 'user')
873 876 store_update(updates, description, 'repo_description')
874 877 store_update(updates, private, 'repo_private')
875 878 store_update(updates, clone_uri, 'clone_uri')
876 879 store_update(updates, landing_rev, 'repo_landing_rev')
877 880 store_update(updates, enable_statistics, 'repo_enable_statistics')
878 881 store_update(updates, enable_locking, 'repo_enable_locking')
879 882 store_update(updates, enable_downloads, 'repo_enable_downloads')
880 883
881 884 # extra fields
882 885 fields = parse_args(Optional.extract(fields), key_prefix='ex_')
883 886 if fields:
884 887 updates.update(fields)
885 888
886 889 RepoModel().update(repo, **updates)
887 890 Session().commit()
888 891 return {
889 892 'msg': 'updated repo ID:%s %s' % (
890 893 repo.repo_id, repo.repo_name),
891 894 'repository': repo.get_api_data(
892 895 include_secrets=include_secrets)
893 896 }
894 897 except Exception:
895 898 log.exception(
896 899 u"Exception while trying to update the repository %s",
897 900 repoid)
898 901 raise JSONRPCError('failed to update repo `%s`' % repoid)
899 902
900 903
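
update_repo above only forwards attributes the caller explicitly sent: anything still wrapped in Optional is treated as a default and skipped by store_update, so untouched settings are never overwritten. A stand-in sketch of that pattern; the real Optional and store_update live in rhodecode.api.utils and these simplified versions are for illustration only:

.. code-block:: python

    class Optional(object):
        """Stand-in marker for 'the caller did not supply this parameter'."""
        def __init__(self, default):
            self.default = default

    def store_update(updates, attr, name):
        # Only record attributes the caller explicitly provided.
        if not isinstance(attr, Optional):
            updates[name] = attr

    updates = {'repo_name': 'just_name'}  # always required by the update call
    store_update(updates, Optional(None), 'repo_group')           # default -> skipped
    store_update(updates, 'new description', 'repo_description')  # explicit -> kept

    assert updates == {'repo_name': 'just_name',
                       'repo_description': 'new description'}
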
901 904 @jsonrpc_method()
902 905 def fork_repo(request, apiuser, repoid, fork_name,
903 906 owner=Optional(OAttr('apiuser')),
904 907 description=Optional(''), copy_permissions=Optional(False),
905 908 private=Optional(False), landing_rev=Optional('rev:tip')):
906 909 """
907 910 Creates a fork of the specified |repo|.
908 911
909 912 * If using |RCE| with Celery this will immediately return a success
910 913 message, even though the fork will be created asynchronously.
911 914
912 915 This command can only be run using an |authtoken| with fork
913 916 permissions on the |repo|.
914 917
915 918 :param apiuser: This is filled automatically from the |authtoken|.
916 919 :type apiuser: AuthUser
917 920 :param repoid: Set repository name or repository ID.
918 921 :type repoid: str or int
919 922 :param fork_name: Set the fork name.
920 923 :type fork_name: str
921 924 :param owner: Set the fork owner.
922 925 :type owner: str
923 926 :param description: Set the fork description.
924 927 :type description: str
925 928 :param copy_permissions: Copy permissions from parent |repo|. The
926 929 default is False.
927 930 :type copy_permissions: bool
928 931 :param private: Make the fork private. The default is False.
929 932 :type private: bool
930 933 :param landing_rev: Set the landing revision. The default is tip.
931 934
932 935 Example output:
933 936
934 937 .. code-block:: bash
935 938
936 939 id : <id_for_response>
937 940 api_key : "<api_key>"
938 941 args: {
939 942 "repoid" : "<reponame or repo_id>",
940 943 "fork_name": "<forkname>",
941 944 "owner": "<username or user_id = Optional(=apiuser)>",
942 945 "description": "<description>",
943 946 "copy_permissions": "<bool>",
944 947 "private": "<bool>",
945 948 "landing_rev": "<landing_rev>"
946 949 }
947 950
948 951 Example error output:
949 952
950 953 .. code-block:: bash
951 954
952 955 id : <id_given_in_input>
953 956 result: {
954 957 "msg": "Created fork of `<reponame>` as `<forkname>`",
955 958 "success": true,
956 959 "task": "<celery task id or None if done sync>"
957 960 }
958 961 error: null
959 962
960 963 """
961 964 if not has_superadmin_permission(apiuser):
962 965 if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
963 966 raise JSONRPCForbidden()
964 967
965 968 repo = get_repo_or_error(repoid)
966 969 repo_name = repo.repo_name
967 970
968 971 (fork_name_cleaned,
969 972 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(
970 973 fork_name)
971 974
972 975 if not has_superadmin_permission(apiuser):
973 976 # check if we have at least read permission for
974 977 # this repo that we fork !
975 978 _perms = (
976 979 'repository.admin', 'repository.write', 'repository.read')
977 980 has_repo_permissions(apiuser, repoid, repo, _perms)
978 981
979 982 if not isinstance(owner, Optional):
980 983 # forbid setting owner for non super admins
981 984 raise JSONRPCError(
982 985 'Only RhodeCode admin can specify `owner` param'
983 986 )
984 987 # check if we have a create.repo permission if not maybe the parent
985 988 # group permission
986 989 if not HasPermissionAnyApi('hg.create.repository')(user=apiuser):
987 990 if parent_group_name:
988 991 repogroupid = parent_group_name
989 992 repo_group = get_repo_group_or_error(parent_group_name)
990 993
991 994 _perms = ('group.admin',)
992 995 if not HasRepoGroupPermissionAnyApi(*_perms)(
993 996 user=apiuser, group_name=repo_group.group_name):
994 997 raise JSONRPCError(
995 998 'repository group `%s` does not exist' % (
996 999 repogroupid,))
997 1000 else:
998 1001 raise JSONRPCForbidden()
999 1002
1000 1003 _repo = RepoModel().get_by_repo_name(fork_name)
1001 1004 if _repo:
1002 1005 type_ = 'fork' if _repo.fork else 'repo'
1003 1006 raise JSONRPCError("%s `%s` already exist" % (type_, fork_name))
1004 1007
1005 1008 if isinstance(owner, Optional):
1006 1009 owner = apiuser.user_id
1007 1010
1008 1011 owner = get_user_or_error(owner)
1009 1012
1010 1013 try:
1011 1014 # create structure of groups and return the last group
1012 1015 repo_group = map_groups(fork_name)
1013 1016 form_data = {
1014 1017 'repo_name': fork_name_cleaned,
1015 1018 'repo_name_full': fork_name,
1016 1019 'repo_group': repo_group.group_id if repo_group else None,
1017 1020 'repo_type': repo.repo_type,
1018 1021 'description': Optional.extract(description),
1019 1022 'private': Optional.extract(private),
1020 1023 'copy_permissions': Optional.extract(copy_permissions),
1021 1024 'landing_rev': Optional.extract(landing_rev),
1022 1025 'fork_parent_id': repo.repo_id,
1023 1026 }
1024 1027
1025 1028 task = RepoModel().create_fork(form_data, cur_user=owner)
1026 1029 # no commit, it's done in RepoModel, or async via celery
1027 1030 from celery.result import BaseAsyncResult
1028 1031 task_id = None
1029 1032 if isinstance(task, BaseAsyncResult):
1030 1033 task_id = task.task_id
1031 1034 return {
1032 1035 'msg': 'Created fork of `%s` as `%s`' % (
1033 1036 repo.repo_name, fork_name),
1034 1037 'success': True, # cannot return the repo data here since fork
1035 1038 # can be done async
1036 1039 'task': task_id
1037 1040 }
1038 1041 except Exception:
1039 1042 log.exception("Exception occurred while trying to fork a repo")
1040 1043 raise JSONRPCError(
1041 1044 'failed to fork repository `%s` as `%s`' % (
1042 1045 repo_name, fork_name))
1043 1046
1044 1047
1045 1048 @jsonrpc_method()
1046 1049 def delete_repo(request, apiuser, repoid, forks=Optional('')):
1047 1050 """
1048 1051 Deletes a repository.
1049 1052
1050 1053 * When the `forks` parameter is set it's possible to detach or delete
1051 1054 forks of deleted repository.
1052 1055
1053 1056 This command can only be run using an |authtoken| with admin
1054 1057 permissions on the |repo|.
1055 1058
1056 1059 :param apiuser: This is filled automatically from the |authtoken|.
1057 1060 :type apiuser: AuthUser
1058 1061 :param repoid: Set the repository name or repository ID.
1059 1062 :type repoid: str or int
1060 1063 :param forks: Set to `detach` or `delete` forks from the |repo|.
1061 1064 :type forks: Optional(str)
1062 1065
1063 1066 Example output:
1064 1067
1065 1068 .. code-block:: bash
1066 1069
1067 1070 id : <id_given_in_input>
1068 1071 result: {
1069 1072 "msg": "Deleted repository `<reponame>`",
1070 1073 "success": true
1071 1074 }
1072 1075 error: null
1073 1076 """
1074 1077
1075 1078 repo = get_repo_or_error(repoid)
1076 1079 if not has_superadmin_permission(apiuser):
1077 1080 _perms = ('repository.admin',)
1078 1081 has_repo_permissions(apiuser, repoid, repo, _perms)
1079 1082
1080 1083 try:
1081 1084 handle_forks = Optional.extract(forks)
1082 1085 _forks_msg = ''
1083 1086 _forks = [f for f in repo.forks]
1084 1087 if handle_forks == 'detach':
1085 1088 _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
1086 1089 elif handle_forks == 'delete':
1087 1090 _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
1088 1091 elif _forks:
1089 1092 raise JSONRPCError(
1090 1093 'Cannot delete `%s`, it still contains attached forks' %
1091 1094 (repo.repo_name,)
1092 1095 )
1093 1096
1094 1097 RepoModel().delete(repo, forks=forks)
1095 1098 Session().commit()
1096 1099 return {
1097 1100 'msg': 'Deleted repository `%s`%s' % (
1098 1101 repo.repo_name, _forks_msg),
1099 1102 'success': True
1100 1103 }
1101 1104 except Exception:
1102 1105 log.exception("Exception occurred while trying to delete repo")
1103 1106 raise JSONRPCError(
1104 1107 'failed to delete repository `%s`' % (repo.repo_name,)
1105 1108 )
1106 1109
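# Illustrative sketch only, not part of the original module: one possible
# way to drive the `delete_repo` call above over JSON-RPC. The request
# envelope keys (id/auth_token/method/args) are inferred from the docstring
# examples and the |authtoken| requirement; the API URL, token and repo
# name are hypothetical placeholders supplied by the caller.
def _example_delete_repo_call(api_url, auth_token, repoid, forks='detach'):
    import json
    import urllib2
    payload = {
        'id': 1,  # echoed back as <id_given_in_input>
        'auth_token': auth_token,
        'method': 'delete_repo',
        'args': {'repoid': repoid, 'forks': forks},
    }
    request = urllib2.Request(
        api_url, data=json.dumps(payload),
        headers={'Content-Type': 'application/json'})
    return json.loads(urllib2.urlopen(request).read())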
1107 1110
1108 1111 #TODO: marcink, change name ?
1109 1112 @jsonrpc_method()
1110 1113 def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
1111 1114 """
1112 1115 Invalidates the cache for the specified repository.
1113 1116
1114 1117 This command can only be run using an |authtoken| with admin rights to
1115 1118 the specified repository.
1116 1119
1117 1120 This command takes the following options:
1118 1121
1119 1122 :param apiuser: This is filled automatically from |authtoken|.
1120 1123 :type apiuser: AuthUser
1121 1124 :param repoid: Sets the repository name or repository ID.
1122 1125 :type repoid: str or int
1123 1126 :param delete_keys: This deletes the invalidated keys instead of
1124 1127 just flagging them.
1125 1128 :type delete_keys: Optional(``True`` | ``False``)
1126 1129
1127 1130 Example output:
1128 1131
1129 1132 .. code-block:: bash
1130 1133
1131 1134 id : <id_given_in_input>
1132 1135 result : {
1133 1136 'msg': Cache for repository `<repository name>` was invalidated,
1134 1137 'repository': <repository name>
1135 1138 }
1136 1139 error : null
1137 1140
1138 1141 Example error output:
1139 1142
1140 1143 .. code-block:: bash
1141 1144
1142 1145 id : <id_given_in_input>
1143 1146 result : null
1144 1147 error : {
1145 1148 'Error occurred during cache invalidation action'
1146 1149 }
1147 1150
1148 1151 """
1149 1152
1150 1153 repo = get_repo_or_error(repoid)
1151 1154 if not has_superadmin_permission(apiuser):
1152 1155 _perms = ('repository.admin', 'repository.write',)
1153 1156 has_repo_permissions(apiuser, repoid, repo, _perms)
1154 1157
1155 1158 delete = Optional.extract(delete_keys)
1156 1159 try:
1157 1160 ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
1158 1161 return {
1159 1162 'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
1160 1163 'repository': repo.repo_name
1161 1164 }
1162 1165 except Exception:
1163 1166 log.exception(
1164 1167 "Exception occurred while trying to invalidate repo cache")
1165 1168 raise JSONRPCError(
1166 1169 'Error occurred during cache invalidation action'
1167 1170 )
1168 1171
1169 1172
1170 1173 #TODO: marcink, change name ?
1171 1174 @jsonrpc_method()
1172 1175 def lock(request, apiuser, repoid, locked=Optional(None),
1173 1176 userid=Optional(OAttr('apiuser'))):
1174 1177 """
1175 1178 Sets the lock state of the specified |repo| by the given user.
1176 1179 For more information, see :ref:`repo-locking`.
1177 1180
1178 1181 * If the ``userid`` option is not set, the repository is locked to the
1179 1182 user who called the method.
1180 1183 * If the ``locked`` parameter is not set, the current lock state of the
1181 1184 repository is displayed.
1182 1185
1183 1186 This command can only be run using an |authtoken| with admin rights to
1184 1187 the specified repository.
1185 1188
1186 1189 This command takes the following options:
1187 1190
1188 1191 :param apiuser: This is filled automatically from the |authtoken|.
1189 1192 :type apiuser: AuthUser
1190 1193 :param repoid: Sets the repository name or repository ID.
1191 1194 :type repoid: str or int
1192 1195 :param locked: Sets the lock state.
1193 1196 :type locked: Optional(``True`` | ``False``)
1194 1197 :param userid: Set the repository lock to this user.
1195 1198 :type userid: Optional(str or int)
1196 1199
1197 1200 Example output:
1198 1201
1199 1202 .. code-block:: bash
1200 1203
1201 1204 id : <id_given_in_input>
1202 1205 result : {
1203 1206 'repo': '<reponame>',
1204 1207 'locked': <bool: lock state>,
1205 1208 'locked_since': <int: lock timestamp>,
1206 1209 'locked_by': <username of person who made the lock>,
1207 1210 'lock_reason': <str: reason for locking>,
1208 1211 'lock_state_changed': <bool: True if lock state has been changed in this request>,
1209 1212 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
1210 1213 or
1211 1214 'msg': 'Repo `<repository name>` not locked.'
1212 1215 or
1213 1216 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
1214 1217 }
1215 1218 error : null
1216 1219
1217 1220 Example error output:
1218 1221
1219 1222 .. code-block:: bash
1220 1223
1221 1224 id : <id_given_in_input>
1222 1225 result : null
1223 1226 error : {
1224 1227 'Error occurred locking repository `<reponame>`
1225 1228 }
1226 1229 """
1227 1230
1228 1231 repo = get_repo_or_error(repoid)
1229 1232 if not has_superadmin_permission(apiuser):
1230 1233 # check if we have at least write permission for this repo !
1231 1234 _perms = ('repository.admin', 'repository.write',)
1232 1235 has_repo_permissions(apiuser, repoid, repo, _perms)
1233 1236
1234 1237 # make sure a normal user does not pass someone else's userid,
1235 1238 # they are not allowed to do that
1236 1239 if not isinstance(userid, Optional) and userid != apiuser.user_id:
1237 1240 raise JSONRPCError('userid is not the same as your user')
1238 1241
1239 1242 if isinstance(userid, Optional):
1240 1243 userid = apiuser.user_id
1241 1244
1242 1245 user = get_user_or_error(userid)
1243 1246
1244 1247 if isinstance(locked, Optional):
1245 1248 lockobj = repo.locked
1246 1249
1247 1250 if lockobj[0] is None:
1248 1251 _d = {
1249 1252 'repo': repo.repo_name,
1250 1253 'locked': False,
1251 1254 'locked_since': None,
1252 1255 'locked_by': None,
1253 1256 'lock_reason': None,
1254 1257 'lock_state_changed': False,
1255 1258 'msg': 'Repo `%s` not locked.' % repo.repo_name
1256 1259 }
1257 1260 return _d
1258 1261 else:
1259 1262 _user_id, _time, _reason = lockobj
1260 1263 lock_user = get_user_or_error(userid)
1261 1264 _d = {
1262 1265 'repo': repo.repo_name,
1263 1266 'locked': True,
1264 1267 'locked_since': _time,
1265 1268 'locked_by': lock_user.username,
1266 1269 'lock_reason': _reason,
1267 1270 'lock_state_changed': False,
1268 1271 'msg': ('Repo `%s` locked by `%s` on `%s`.'
1269 1272 % (repo.repo_name, lock_user.username,
1270 1273 json.dumps(time_to_datetime(_time))))
1271 1274 }
1272 1275 return _d
1273 1276
1274 1277 # force locked state through a flag
1275 1278 else:
1276 1279 locked = str2bool(locked)
1277 1280 lock_reason = Repository.LOCK_API
1278 1281 try:
1279 1282 if locked:
1280 1283 lock_time = time.time()
1281 1284 Repository.lock(repo, user.user_id, lock_time, lock_reason)
1282 1285 else:
1283 1286 lock_time = None
1284 1287 Repository.unlock(repo)
1285 1288 _d = {
1286 1289 'repo': repo.repo_name,
1287 1290 'locked': locked,
1288 1291 'locked_since': lock_time,
1289 1292 'locked_by': user.username,
1290 1293 'lock_reason': lock_reason,
1291 1294 'lock_state_changed': True,
1292 1295 'msg': ('User `%s` set lock state for repo `%s` to `%s`'
1293 1296 % (user.username, repo.repo_name, locked))
1294 1297 }
1295 1298 return _d
1296 1299 except Exception:
1297 1300 log.exception(
1298 1301 "Exception occurred while trying to lock repository")
1299 1302 raise JSONRPCError(
1300 1303 'Error occurred locking repository `%s`' % repo.repo_name
1301 1304 )
1302 1305
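# Illustrative sketch only, not part of the original module: the two ways
# the `lock` call above can be used, per its docstring. Omitting `locked`
# only reports the current lock state; passing True or False changes it.
# The repoid value is whatever the caller would normally supply.
def _example_lock_args(repoid):
    query_args = {'repoid': repoid}                # read lock state only
    set_args = {'repoid': repoid, 'locked': True}  # lock repo to apiuser
    return query_args, set_args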
1303 1306
1304 1307 @jsonrpc_method()
1305 1308 def comment_commit(
1306 1309 request, apiuser, repoid, commit_id, message,
1307 1310 userid=Optional(OAttr('apiuser')), status=Optional(None)):
1308 1311 """
1309 1312 Set a commit comment, and optionally change the status of the commit.
1310 1313 This command can only be executed using an api_key belonging to a user
1311 1314 with admin rights, or a repository administrator.
1312 1315
1313 1316 :param apiuser: This is filled automatically from the |authtoken|.
1314 1317 :type apiuser: AuthUser
1315 1318 :param repoid: Set the repository name or repository ID.
1316 1319 :type repoid: str or int
1317 1320 :param commit_id: Specify the commit_id for which to set a comment.
1318 1321 :type commit_id: str
1319 1322 :param message: The comment text.
1320 1323 :type message: str
1321 1324 :param userid: Set the user name of the comment creator.
1322 1325 :type userid: Optional(str or int)
1323 1326 :param status: status, one of 'not_reviewed', 'approved', 'rejected',
1324 1327 'under_review'
1325 1328 :type status: str
1326 1329
1327 1330 Example output:
1328 1331
1329 1332 .. code-block:: json
1330 1333
1331 1334 {
1332 1335 "id" : <id_given_in_input>,
1333 1336 "result" : {
1334 1337 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
1335 1338 "status_change": null or <status>,
1336 1339 "success": true
1337 1340 },
1338 1341 "error" : null
1339 1342 }
1340 1343
1341 1344 """
1342 1345 repo = get_repo_or_error(repoid)
1343 1346 if not has_superadmin_permission(apiuser):
1344 1347 _perms = ('repository.admin',)
1345 1348 has_repo_permissions(apiuser, repoid, repo, _perms)
1346 1349
1347 1350 if isinstance(userid, Optional):
1348 1351 userid = apiuser.user_id
1349 1352
1350 1353 user = get_user_or_error(userid)
1351 1354 status = Optional.extract(status)
1352 1355
1353 1356 allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
1354 1357 if status and status not in allowed_statuses:
1355 1358 raise JSONRPCError('Bad status, must be one '
1356 1359 'of %s, got %s' % (allowed_statuses, status,))
1357 1360
1358 1361 try:
1359 1362 rc_config = SettingsModel().get_all_settings()
1360 1363 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
1361 1364
1362 1365 comm = ChangesetCommentsModel().create(
1363 1366 message, repo, user, revision=commit_id, status_change=status,
1364 1367 renderer=renderer)
1365 1368 if status:
1366 1369 # also do a status change
1367 1370 try:
1368 1371 ChangesetStatusModel().set_status(
1369 1372 repo, status, user, comm, revision=commit_id,
1370 1373 dont_allow_on_closed_pull_request=True
1371 1374 )
1372 1375 except StatusChangeOnClosedPullRequestError:
1373 1376 log.exception(
1374 1377 "Exception occurred while trying to change repo commit status")
1375 1378 msg = ('Changing status on a changeset associated with '
1376 1379 'a closed pull request is not allowed')
1377 1380 raise JSONRPCError(msg)
1378 1381
1379 1382 Session().commit()
1380 1383 return {
1381 1384 'msg': (
1382 1385 'Commented on commit `%s` for repository `%s`' % (
1383 1386 comm.revision, repo.repo_name)),
1384 1387 'status_change': status,
1385 1388 'success': True,
1386 1389 }
1387 1390 except JSONRPCError:
1388 1391 # catch any internal errors and re-raise them to prevent the
1389 1392 # global catch below from silencing them
1390 1393 raise
1391 1394 except Exception:
1392 1395 log.exception("Exception occurred while trying to comment on commit")
1393 1396 raise JSONRPCError(
1394 1397 'failed to set comment on repository `%s`' % (repo.repo_name,)
1395 1398 )
1396 1399
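# Illustrative sketch only, not part of the original module: example `args`
# for the `comment_commit` call above. The commit hash and message are
# placeholders; `status`, if given, must be one of the ChangesetStatus
# values listed in the docstring ('not_reviewed', 'approved', 'rejected',
# 'under_review'), otherwise the call raises JSONRPCError.
def _example_comment_commit_args(repoid, commit_id):
    return {
        'repoid': repoid,
        'commit_id': commit_id,
        'message': 'Looks good to me',
        'status': 'approved',  # optional; omit to add a plain comment
    }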
1397 1400
1398 1401 @jsonrpc_method()
1399 1402 def grant_user_permission(request, apiuser, repoid, userid, perm):
1400 1403 """
1401 1404 Grant permissions for the specified user on the given repository,
1402 1405 or update existing permissions if found.
1403 1406
1404 1407 This command can only be run using an |authtoken| with admin
1405 1408 permissions on the |repo|.
1406 1409
1407 1410 :param apiuser: This is filled automatically from the |authtoken|.
1408 1411 :type apiuser: AuthUser
1409 1412 :param repoid: Set the repository name or repository ID.
1410 1413 :type repoid: str or int
1411 1414 :param userid: Set the user name.
1412 1415 :type userid: str
1413 1416 :param perm: Set the user permissions, using the following format
1414 1417 ``(repository.(none|read|write|admin))``
1415 1418 :type perm: str
1416 1419
1417 1420 Example output:
1418 1421
1419 1422 .. code-block:: bash
1420 1423
1421 1424 id : <id_given_in_input>
1422 1425 result: {
1423 1426 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
1424 1427 "success": true
1425 1428 }
1426 1429 error: null
1427 1430 """
1428 1431
1429 1432 repo = get_repo_or_error(repoid)
1430 1433 user = get_user_or_error(userid)
1431 1434 perm = get_perm_or_error(perm)
1432 1435 if not has_superadmin_permission(apiuser):
1433 1436 _perms = ('repository.admin',)
1434 1437 has_repo_permissions(apiuser, repoid, repo, _perms)
1435 1438
1436 1439 try:
1437 1440
1438 1441 RepoModel().grant_user_permission(repo=repo, user=user, perm=perm)
1439 1442
1440 1443 Session().commit()
1441 1444 return {
1442 1445 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
1443 1446 perm.permission_name, user.username, repo.repo_name
1444 1447 ),
1445 1448 'success': True
1446 1449 }
1447 1450 except Exception:
1448 1451 log.exception(
1449 1452 "Exception occurred while trying edit permissions for repo")
1450 1453 raise JSONRPCError(
1451 1454 'failed to edit permission for user: `%s` in repo: `%s`' % (
1452 1455 userid, repoid
1453 1456 )
1454 1457 )
1455 1458
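# Illustrative sketch only, not part of the original module: example `args`
# for the `grant_user_permission` call above. The user name is a
# placeholder; `perm` must follow the documented
# repository.(none|read|write|admin) format.
def _example_grant_user_permission_args(repoid):
    return {
        'repoid': repoid,
        'userid': 'some-user',
        'perm': 'repository.write',
    }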
1456 1459
1457 1460 @jsonrpc_method()
1458 1461 def revoke_user_permission(request, apiuser, repoid, userid):
1459 1462 """
1460 1463 Revoke permission for a user on the specified repository.
1461 1464
1462 1465 This command can only be run using an |authtoken| with admin
1463 1466 permissions on the |repo|.
1464 1467
1465 1468 :param apiuser: This is filled automatically from the |authtoken|.
1466 1469 :type apiuser: AuthUser
1467 1470 :param repoid: Set the repository name or repository ID.
1468 1471 :type repoid: str or int
1469 1472 :param userid: Set the user name of revoked user.
1470 1473 :type userid: str or int
1471 1474
1472 1475 Example output:
1473 1476
1474 1477 .. code-block:: bash
1475 1478
1476 1479 id : <id_given_in_input>
1477 1480 result: {
1478 1481 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
1479 1482 "success": true
1480 1483 }
1481 1484 error: null
1482 1485 """
1483 1486
1484 1487 repo = get_repo_or_error(repoid)
1485 1488 user = get_user_or_error(userid)
1486 1489 if not has_superadmin_permission(apiuser):
1487 1490 _perms = ('repository.admin',)
1488 1491 has_repo_permissions(apiuser, repoid, repo, _perms)
1489 1492
1490 1493 try:
1491 1494 RepoModel().revoke_user_permission(repo=repo, user=user)
1492 1495 Session().commit()
1493 1496 return {
1494 1497 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
1495 1498 user.username, repo.repo_name
1496 1499 ),
1497 1500 'success': True
1498 1501 }
1499 1502 except Exception:
1500 1503 log.exception(
1501 1504 "Exception occurred while trying revoke permissions to repo")
1502 1505 raise JSONRPCError(
1503 1506 'failed to edit permission for user: `%s` in repo: `%s`' % (
1504 1507 userid, repoid
1505 1508 )
1506 1509 )
1507 1510
1508 1511
1509 1512 @jsonrpc_method()
1510 1513 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
1511 1514 """
1512 1515 Grant permission for a user group on the specified repository,
1513 1516 or update existing permissions.
1514 1517
1515 1518 This command can only be run using an |authtoken| with admin
1516 1519 permissions on the |repo|.
1517 1520
1518 1521 :param apiuser: This is filled automatically from the |authtoken|.
1519 1522 :type apiuser: AuthUser
1520 1523 :param repoid: Set the repository name or repository ID.
1521 1524 :type repoid: str or int
1522 1525 :param usergroupid: Specify the ID of the user group.
1523 1526 :type usergroupid: str or int
1524 1527 :param perm: Set the user group permissions using the following
1525 1528 format: (repository.(none|read|write|admin))
1526 1529 :type perm: str
1527 1530
1528 1531 Example output:
1529 1532
1530 1533 .. code-block:: bash
1531 1534
1532 1535 id : <id_given_in_input>
1533 1536 result : {
1534 1537 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
1535 1538 "success": true
1536 1539
1537 1540 }
1538 1541 error : null
1539 1542
1540 1543 Example error output:
1541 1544
1542 1545 .. code-block:: bash
1543 1546
1544 1547 id : <id_given_in_input>
1545 1548 result : null
1546 1549 error : {
1547 1550 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
1548 1551 }
1549 1552
1550 1553 """
1551 1554
1552 1555 repo = get_repo_or_error(repoid)
1553 1556 perm = get_perm_or_error(perm)
1554 1557 if not has_superadmin_permission(apiuser):
1555 1558 _perms = ('repository.admin',)
1556 1559 has_repo_permissions(apiuser, repoid, repo, _perms)
1557 1560
1558 1561 user_group = get_user_group_or_error(usergroupid)
1559 1562 if not has_superadmin_permission(apiuser):
1560 1563 # check if we have at least read permission for this user group !
1561 1564 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1562 1565 if not HasUserGroupPermissionAnyApi(*_perms)(
1563 1566 user=apiuser, user_group_name=user_group.users_group_name):
1564 1567 raise JSONRPCError(
1565 1568 'user group `%s` does not exist' % (usergroupid,))
1566 1569
1567 1570 try:
1568 1571 RepoModel().grant_user_group_permission(
1569 1572 repo=repo, group_name=user_group, perm=perm)
1570 1573
1571 1574 Session().commit()
1572 1575 return {
1573 1576 'msg': 'Granted perm: `%s` for user group: `%s` in '
1574 1577 'repo: `%s`' % (
1575 1578 perm.permission_name, user_group.users_group_name,
1576 1579 repo.repo_name
1577 1580 ),
1578 1581 'success': True
1579 1582 }
1580 1583 except Exception:
1581 1584 log.exception(
1582 1585 "Exception occurred while trying change permission on repo")
1583 1586 raise JSONRPCError(
1584 1587 'failed to edit permission for user group: `%s` in '
1585 1588 'repo: `%s`' % (
1586 1589 usergroupid, repo.repo_name
1587 1590 )
1588 1591 )
1589 1592
1590 1593
1591 1594 @jsonrpc_method()
1592 1595 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
1593 1596 """
1594 1597 Revoke the permissions of a user group on a given repository.
1595 1598
1596 1599 This command can only be run using an |authtoken| with admin
1597 1600 permissions on the |repo|.
1598 1601
1599 1602 :param apiuser: This is filled automatically from the |authtoken|.
1600 1603 :type apiuser: AuthUser
1601 1604 :param repoid: Set the repository name or repository ID.
1602 1605 :type repoid: str or int
1603 1606 :param usergroupid: Specify the user group ID.
1604 1607 :type usergroupid: str or int
1605 1608
1606 1609 Example output:
1607 1610
1608 1611 .. code-block:: bash
1609 1612
1610 1613 id : <id_given_in_input>
1611 1614 result: {
1612 1615 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
1613 1616 "success": true
1614 1617 }
1615 1618 error: null
1616 1619 """
1617 1620
1618 1621 repo = get_repo_or_error(repoid)
1619 1622 if not has_superadmin_permission(apiuser):
1620 1623 _perms = ('repository.admin',)
1621 1624 has_repo_permissions(apiuser, repoid, repo, _perms)
1622 1625
1623 1626 user_group = get_user_group_or_error(usergroupid)
1624 1627 if not has_superadmin_permission(apiuser):
1625 1628 # check if we have at least read permission for this user group !
1626 1629 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1627 1630 if not HasUserGroupPermissionAnyApi(*_perms)(
1628 1631 user=apiuser, user_group_name=user_group.users_group_name):
1629 1632 raise JSONRPCError(
1630 1633 'user group `%s` does not exist' % (usergroupid,))
1631 1634
1632 1635 try:
1633 1636 RepoModel().revoke_user_group_permission(
1634 1637 repo=repo, group_name=user_group)
1635 1638
1636 1639 Session().commit()
1637 1640 return {
1638 1641 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
1639 1642 user_group.users_group_name, repo.repo_name
1640 1643 ),
1641 1644 'success': True
1642 1645 }
1643 1646 except Exception:
1644 1647 log.exception("Exception occurred while trying to revoke "
1645 1648 "user group permission on repo")
1646 1649 raise JSONRPCError(
1647 1650 'failed to edit permission for user group: `%s` in '
1648 1651 'repo: `%s`' % (
1649 1652 user_group.users_group_name, repo.repo_name
1650 1653 )
1651 1654 )
1652 1655
1653 1656
1654 1657 @jsonrpc_method()
1655 1658 def pull(request, apiuser, repoid):
1656 1659 """
1657 1660 Triggers a pull on the given repository from a remote location. You
1658 1661 can use this to keep remote repositories up-to-date.
1659 1662
1660 1663 This command can only be run using an |authtoken| with admin
1661 1664 rights to the specified repository. For more information,
1662 1665 see :ref:`config-token-ref`.
1663 1666
1664 1667 This command takes the following options:
1665 1668
1666 1669 :param apiuser: This is filled automatically from the |authtoken|.
1667 1670 :type apiuser: AuthUser
1668 1671 :param repoid: The repository name or repository ID.
1669 1672 :type repoid: str or int
1670 1673
1671 1674 Example output:
1672 1675
1673 1676 .. code-block:: bash
1674 1677
1675 1678 id : <id_given_in_input>
1676 1679 result : {
1677 1680 "msg": "Pulled from `<repository name>`"
1678 1681 "repository": "<repository name>"
1679 1682 }
1680 1683 error : null
1681 1684
1682 1685 Example error output:
1683 1686
1684 1687 .. code-block:: bash
1685 1688
1686 1689 id : <id_given_in_input>
1687 1690 result : null
1688 1691 error : {
1689 1692 "Unable to pull changes from `<reponame>`"
1690 1693 }
1691 1694
1692 1695 """
1693 1696
1694 1697 repo = get_repo_or_error(repoid)
1695 1698 if not has_superadmin_permission(apiuser):
1696 1699 _perms = ('repository.admin',)
1697 1700 has_repo_permissions(apiuser, repoid, repo, _perms)
1698 1701
1699 1702 try:
1700 1703 ScmModel().pull_changes(repo.repo_name, apiuser.username)
1701 1704 return {
1702 1705 'msg': 'Pulled from `%s`' % repo.repo_name,
1703 1706 'repository': repo.repo_name
1704 1707 }
1705 1708 except Exception:
1706 1709 log.exception("Exception occurred while trying to "
1707 1710 "pull changes from remote location")
1708 1711 raise JSONRPCError(
1709 1712 'Unable to pull changes from `%s`' % repo.repo_name
1710 1713 )
1711 1714
1712 1715
1713 1716 @jsonrpc_method()
1714 1717 def strip(request, apiuser, repoid, revision, branch):
1715 1718 """
1716 1719 Strips the given revision from the specified repository.
1717 1720
1718 1721 * This will remove the revision and all of its descendants.
1719 1722
1720 1723 This command can only be run using an |authtoken| with admin rights to
1721 1724 the specified repository.
1722 1725
1723 1726 This command takes the following options:
1724 1727
1725 1728 :param apiuser: This is filled automatically from the |authtoken|.
1726 1729 :type apiuser: AuthUser
1727 1730 :param repoid: The repository name or repository ID.
1728 1731 :type repoid: str or int
1729 1732 :param revision: The revision you wish to strip.
1730 1733 :type revision: str
1731 1734 :param branch: The branch from which to strip the revision.
1732 1735 :type branch: str
1733 1736
1734 1737 Example output:
1735 1738
1736 1739 .. code-block:: bash
1737 1740
1738 1741 id : <id_given_in_input>
1739 1742 result : {
1740 1743 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
1741 1744 "repository": "<repository name>"
1742 1745 }
1743 1746 error : null
1744 1747
1745 1748 Example error output:
1746 1749
1747 1750 .. code-block:: bash
1748 1751
1749 1752 id : <id_given_in_input>
1750 1753 result : null
1751 1754 error : {
1752 1755 "Unable to strip commit <commit_hash> from repo `<repository name>`"
1753 1756 }
1754 1757
1755 1758 """
1756 1759
1757 1760 repo = get_repo_or_error(repoid)
1758 1761 if not has_superadmin_permission(apiuser):
1759 1762 _perms = ('repository.admin',)
1760 1763 has_repo_permissions(apiuser, repoid, repo, _perms)
1761 1764
1762 1765 try:
1763 1766 ScmModel().strip(repo, revision, branch)
1764 1767 return {
1765 1768 'msg': 'Stripped commit %s from repo `%s`' % (
1766 1769 revision, repo.repo_name),
1767 1770 'repository': repo.repo_name
1768 1771 }
1769 1772 except Exception:
1770 1773 log.exception("Exception while trying to strip")
1771 1774 raise JSONRPCError(
1772 1775 'Unable to strip commit %s from repo `%s`' % (
1773 1776 revision, repo.repo_name)
1774 1777 )
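# Illustrative sketch only, not part of the original module: example `args`
# for the `strip` call above. The commit hash and branch are hypothetical
# placeholders; since stripping removes the revision and all of its
# descendants, it is normally aimed at a commit that exists only on the
# given branch.
def _example_strip_args(repoid):
    return {
        'repoid': repoid,
        'revision': 'abcdef012345',  # hypothetical commit hash
        'branch': 'default',
    }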
@@ -1,85 +1,122 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 import importlib
22 23
23 24 from pkg_resources import iter_entry_points
24 25 from pyramid.authentication import SessionAuthenticationPolicy
25 26
26 27 from rhodecode.authentication.registry import AuthenticationPluginRegistry
27 28 from rhodecode.authentication.routes import root_factory
28 29 from rhodecode.authentication.routes import AuthnRootResource
29 30 from rhodecode.config.routing import ADMIN_PREFIX
31 from rhodecode.model.settings import SettingsModel
32
30 33
31 34 log = logging.getLogger(__name__)
32 35
36 # Plugin ID prefixes to distinguish between normal and legacy plugins.
37 plugin_prefix = 'egg:'
38 legacy_plugin_prefix = 'py:'
39
33 40
34 41 # TODO: Currently this is only used to discover the authentication plugins.
35 42 # Later on this may be used in a generic way to look up and include all kinds
36 43 # of supported enterprise plugins. Therefore this has to be moved and
37 44 # refactored to a real 'plugin look up' machinery.
38 45 # TODO: When refactoring this think about splitting it up into distinct
39 46 # discover, load and include phases.
40 47 def _discover_plugins(config, entry_point='enterprise.plugins1'):
41 _discovered_plugins = {}
42
43 48 for ep in iter_entry_points(entry_point):
44 plugin_id = 'egg:{}#{}'.format(ep.dist.project_name, ep.name)
49 plugin_id = '{}{}#{}'.format(
50 plugin_prefix, ep.dist.project_name, ep.name)
45 51 log.debug('Plugin discovered: "%s"', plugin_id)
46 module = ep.load()
47 plugin = module(plugin_id=plugin_id)
48 config.include(plugin.includeme)
52 try:
53 module = ep.load()
54 plugin = module(plugin_id=plugin_id)
55 config.include(plugin.includeme)
56 except Exception as e:
57 log.exception(
58 'Exception while loading authentication plugin '
59 '"{}": {}'.format(plugin_id, e.message))
60
61
62 def _import_legacy_plugin(plugin_id):
63 module_name = plugin_id.split(legacy_plugin_prefix, 1)[-1]
64 module = importlib.import_module(module_name)
65 return module.plugin_factory(plugin_id=plugin_id)
66
49 67
50 return _discovered_plugins
68 def _discover_legacy_plugins(config, prefix=legacy_plugin_prefix):
69 """
70 Function that imports the legacy plugins stored in the 'auth_plugins'
71 setting in the database which use the specified prefix. Normally 'py:' is
72 used for legacy plugins.
73 """
74 auth_plugins = SettingsModel().get_setting_by_name('auth_plugins')
75 enabled_plugins = auth_plugins.app_settings_value
76 legacy_plugins = [id_ for id_ in enabled_plugins if id_.startswith(prefix)]
77
78 for plugin_id in legacy_plugins:
79 log.debug('Legacy plugin discovered: "%s"', plugin_id)
80 try:
81 plugin = _import_legacy_plugin(plugin_id)
82 config.include(plugin.includeme)
83 except Exception as e:
84 log.exception(
85 'Exception while loading legacy authentication plugin '
86 '"{}": {}'.format(plugin_id, e.message))
51 87
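# Illustrative sketch only, not part of the original module: how a legacy
# plugin id maps onto an importable module path, mirroring the prefix
# handling in _import_legacy_plugin() above. The dotted path here is a
# made-up placeholder, not a real plugin shipped with the application.
def _example_legacy_plugin_module_name():
    example_id = legacy_plugin_prefix + 'some_package.some_auth_module'
    # strip the 'py:' prefix, leaving the module path passed to importlib
    return example_id.split(legacy_plugin_prefix, 1)[-1]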
52 88
53 89 def includeme(config):
54 90 # Set authentication policy.
55 91 authn_policy = SessionAuthenticationPolicy()
56 92 config.set_authentication_policy(authn_policy)
57 93
58 94 # Create authentication plugin registry and add it to the pyramid registry.
59 authn_registry = AuthenticationPluginRegistry()
95 authn_registry = AuthenticationPluginRegistry(config.get_settings())
60 96 config.add_directive('add_authn_plugin', authn_registry.add_authn_plugin)
61 97 config.registry.registerUtility(authn_registry)
62 98
63 99 # Create authentication traversal root resource.
64 100 authn_root_resource = root_factory()
65 101 config.add_directive('add_authn_resource',
66 102 authn_root_resource.add_authn_resource)
67 103
68 104 # Add the authentication traversal route.
69 105 config.add_route('auth_home',
70 106 ADMIN_PREFIX + '/auth*traverse',
71 107 factory=root_factory)
72 108 # Add the authentication settings root views.
73 109 config.add_view('rhodecode.authentication.views.AuthSettingsView',
74 110 attr='index',
75 111 request_method='GET',
76 112 route_name='auth_home',
77 113 context=AuthnRootResource)
78 114 config.add_view('rhodecode.authentication.views.AuthSettingsView',
79 115 attr='auth_settings',
80 116 request_method='POST',
81 117 route_name='auth_home',
82 118 context=AuthnRootResource)
83 119
84 120 # Auto discover authentication plugins and include their configuration.
85 121 _discover_plugins(config)
122 _discover_legacy_plugins(config)
@@ -1,739 +1,609 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Authentication modules
23 23 """
24 24
25 25 import logging
26 26 import time
27 27 import traceback
28 import warnings
28 29
29 from authomatic import Authomatic
30 from authomatic.adapters import WebObAdapter
31 from authomatic.providers import oauth2, oauth1
32 from pylons import url
33 from pylons.controllers.util import Response
34 from pylons.i18n.translation import _
35 30 from pyramid.threadlocal import get_current_registry
36 31 from sqlalchemy.ext.hybrid import hybrid_property
37 32
38 import rhodecode.lib.helpers as h
39 33 from rhodecode.authentication.interface import IAuthnPluginRegistry
40 34 from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
41 35 from rhodecode.lib import caches
42 36 from rhodecode.lib.auth import PasswordGenerator, _RhodeCodeCryptoBCrypt
43 37 from rhodecode.lib.utils2 import md5_safe, safe_int
44 38 from rhodecode.lib.utils2 import safe_str
45 from rhodecode.model.db import User, ExternalIdentity
39 from rhodecode.model.db import User
46 40 from rhodecode.model.meta import Session
47 41 from rhodecode.model.settings import SettingsModel
48 42 from rhodecode.model.user import UserModel
49 43 from rhodecode.model.user_group import UserGroupModel
50 44
51 45
52 46 log = logging.getLogger(__name__)
53 47
54 48 # auth types that authenticate() function can receive
55 49 VCS_TYPE = 'vcs'
56 50 HTTP_TYPE = 'http'
57 51
58 52
59 53 class LazyFormencode(object):
60 54 def __init__(self, formencode_obj, *args, **kwargs):
61 55 self.formencode_obj = formencode_obj
62 56 self.args = args
63 57 self.kwargs = kwargs
64 58
65 59 def __call__(self, *args, **kwargs):
66 60 from inspect import isfunction
67 61 formencode_obj = self.formencode_obj
68 62 if isfunction(formencode_obj):
69 63 # case we wrap validators into functions
70 64 formencode_obj = self.formencode_obj(*args, **kwargs)
71 65 return formencode_obj(*self.args, **self.kwargs)
72 66
73 67
74 68 class RhodeCodeAuthPluginBase(object):
75 69 # cache the authentication request for N amount of seconds. Some kind
76 70 # of authentication methods are very heavy and it's very efficient to cache
77 71 # the result of a call. If it's set to None (default) cache is off
78 72 AUTH_CACHE_TTL = None
79 73 AUTH_CACHE = {}
80 74
81 75 auth_func_attrs = {
82 76 "username": "unique username",
83 77 "firstname": "first name",
84 78 "lastname": "last name",
85 79 "email": "email address",
86 80 "groups": '["list", "of", "groups"]',
87 81 "extern_name": "name in external source of record",
88 82 "extern_type": "type of external source of record",
89 83 "admin": 'True|False defines if user should be RhodeCode super admin',
90 84 "active":
91 85 'True|False defines active state of user internally for RhodeCode',
92 86 "active_from_extern":
93 87 "True|False\None, active state from the external auth, "
94 88 "None means use definition from RhodeCode extern_type active value"
95 89 }
96 90 # set on authenticate() method and via set_auth_type func.
97 91 auth_type = None
98 92
99 93 # List of setting names to store encrypted. Plugins may override this list
100 94 # to store settings encrypted.
101 95 _settings_encrypted = []
102 96
103 97 # Mapping of python to DB settings model types. Plugins may override or
104 98 # extend this mapping.
105 99 _settings_type_map = {
106 100 str: 'str',
107 101 int: 'int',
108 102 unicode: 'unicode',
109 103 bool: 'bool',
110 104 list: 'list',
111 105 }
112 106
113 107 def __init__(self, plugin_id):
114 108 self._plugin_id = plugin_id
115 109
116 110 def _get_setting_full_name(self, name):
117 111 """
118 112 Return the full setting name used for storing values in the database.
119 113 """
120 114 # TODO: johbo: Using the name here is problematic. It would be good to
121 115 # introduce either new models in the database to hold Plugin and
122 116 # PluginSetting or to use the plugin id here.
123 117 return 'auth_{}_{}'.format(self.name, name)
124 118
125 119 def _get_setting_type(self, name, value):
126 120 """
127 121 Get the type as used by the SettingsModel accordingly to type of passed
128 122 value. Optionally the suffix `.encrypted` is appended to instruct
129 123 SettingsModel to store it encrypted.
130 124 """
131 125 type_ = self._settings_type_map.get(type(value), 'unicode')
132 126 if name in self._settings_encrypted:
133 127 type_ = '{}.encrypted'.format(type_)
134 128 return type_
135 129
136 130 def is_enabled(self):
137 131 """
138 132 Returns true if this plugin is enabled. An enabled plugin can be
139 133 configured in the admin interface but it is not consulted during
140 134 authentication.
141 135 """
142 136 auth_plugins = SettingsModel().get_auth_plugins()
143 137 return self.get_id() in auth_plugins
144 138
145 139 def is_active(self):
146 140 """
147 141 Returns true if the plugin is activated. An activated plugin is
148 142 consulted during authentication, assumed it is also enabled.
149 143 """
150 144 return self.get_setting_by_name('enabled')
151 145
152 146 def get_id(self):
153 147 """
154 148 Returns the plugin id.
155 149 """
156 150 return self._plugin_id
157 151
158 152 def get_display_name(self):
159 153 """
160 154 Returns a translation string for displaying purposes.
161 155 """
162 156 raise NotImplementedError('Not implemented in base class')
163 157
164 158 def get_settings_schema(self):
165 159 """
166 160 Returns a colander schema, representing the plugin settings.
167 161 """
168 162 return AuthnPluginSettingsSchemaBase()
169 163
170 164 def get_setting_by_name(self, name):
171 165 """
172 166 Returns a plugin setting by name.
173 167 """
174 168 full_name = self._get_setting_full_name(name)
175 169 db_setting = SettingsModel().get_setting_by_name(full_name)
176 170 return db_setting.app_settings_value if db_setting else None
177 171
178 172 def create_or_update_setting(self, name, value):
179 173 """
180 174 Create or update a setting for this plugin in the persistent storage.
181 175 """
182 176 full_name = self._get_setting_full_name(name)
183 177 type_ = self._get_setting_type(name, value)
184 178 db_setting = SettingsModel().create_or_update_setting(
185 179 full_name, value, type_)
186 180 return db_setting.app_settings_value
187 181
188 182 def get_settings(self):
189 183 """
190 184 Returns the plugin settings as dictionary.
191 185 """
192 186 settings = {}
193 187 for node in self.get_settings_schema():
194 188 settings[node.name] = self.get_setting_by_name(node.name)
195 189 return settings
196 190
197 191 @property
198 192 def validators(self):
199 193 """
200 194 Exposes RhodeCode validators modules
201 195 """
202 196 # this is a hack to overcome issues with pylons threadlocals and
203 197 # translator object _() not being registered properly.
204 198 class LazyCaller(object):
205 199 def __init__(self, name):
206 200 self.validator_name = name
207 201
208 202 def __call__(self, *args, **kwargs):
209 203 from rhodecode.model import validators as v
210 204 obj = getattr(v, self.validator_name)
211 205 # log.debug('Initializing lazy formencode object: %s', obj)
212 206 return LazyFormencode(obj, *args, **kwargs)
213 207
214 208 class ProxyGet(object):
215 209 def __getattribute__(self, name):
216 210 return LazyCaller(name)
217 211
218 212 return ProxyGet()
219 213
220 214 @hybrid_property
221 215 def name(self):
222 216 """
223 217 Returns the name of this authentication plugin.
224 218
225 219 :returns: string
226 220 """
227 221 raise NotImplementedError("Not implemented in base class")
228 222
223 @property
224 def is_headers_auth(self):
225 """
226 Returns True if this authentication plugin uses HTTP headers as
227 its authentication method.
228 """
229 return False
230
229 231 @hybrid_property
230 232 def is_container_auth(self):
231 233 """
232 Returns bool if this module uses container auth.
233
234 This property will trigger an automatic call to authenticate on
235 a visit to the website or during a push/pull.
236
237 :returns: bool
234 Deprecated method that indicates if this authentication plugin uses
235 HTTP headers as its authentication method.
238 236 """
239 return False
237 warnings.warn(
238 'Use is_headers_auth instead.', category=DeprecationWarning)
239 return self.is_headers_auth
240 240
241 241 @hybrid_property
242 242 def allows_creating_users(self):
243 243 """
244 244 Defines if Plugin allows users to be created on-the-fly when
245 245 authentication is called. Controls how external plugins should behave
246 246 in terms of whether they are allowed to create new users or not. Base plugins
247 247 should not be allowed to, but external ones should be!
248 248
249 249 :return: bool
250 250 """
251 251 return False
252 252
253 253 def set_auth_type(self, auth_type):
254 254 self.auth_type = auth_type
255 255
256 256 def allows_authentication_from(
257 257 self, user, allows_non_existing_user=True,
258 258 allowed_auth_plugins=None, allowed_auth_sources=None):
259 259 """
260 260 Checks if this authentication module should accept a request for
261 261 the current user.
262 262
263 263 :param user: user object fetched using plugin's get_user() method.
264 264 :param allows_non_existing_user: if True, don't allow the
265 265 user to be empty, meaning not existing in our database
266 266 :param allowed_auth_plugins: if provided, users extern_type will be
267 267 checked against a list of provided extern types, which are plugin
268 268 auth_names in the end
269 269 :param allowed_auth_sources: authentication type allowed,
270 270 `http` or `vcs` default is both.
271 271 defines if plugin will accept only http authentication vcs
272 272 authentication(git/hg) or both
273 273 :returns: boolean
274 274 """
275 275 if not user and not allows_non_existing_user:
276 276 log.debug('User is empty but plugin does not allow empty users, '
277 277 'not allowed to authenticate')
278 278 return False
279 279
280 280 expected_auth_plugins = allowed_auth_plugins or [self.name]
281 281 if user and (user.extern_type and
282 282 user.extern_type not in expected_auth_plugins):
283 283 log.debug(
284 284 'User `%s` is bound to `%s` auth type. Plugin allows only '
285 285 '%s, skipping', user, user.extern_type, expected_auth_plugins)
286 286
287 287 return False
288 288
289 289 # by default accept both
290 290 expected_auth_from = allowed_auth_sources or [HTTP_TYPE, VCS_TYPE]
291 291 if self.auth_type not in expected_auth_from:
292 292 log.debug('Current auth source is %s but plugin only allows %s',
293 293 self.auth_type, expected_auth_from)
294 294 return False
295 295
296 296 return True
297 297
298 298 def get_user(self, username=None, **kwargs):
299 299 """
300 300 Helper method for user fetching in plugins, by default it's using
301 301 simple fetch by username, but this method can be customized in plugins
302 eg. container auth plugin to fetch user by environ params
302 eg. headers auth plugin to fetch user by environ params
303 303
304 304 :param username: username if given to fetch from database
305 305 :param kwargs: extra arguments needed for user fetching.
306 306 """
307 307 user = None
308 308 log.debug(
309 309 'Trying to fetch user `%s` from RhodeCode database', username)
310 310 if username:
311 311 user = User.get_by_username(username)
312 312 if not user:
313 313 log.debug('User not found, fallback to fetch user in '
314 314 'case insensitive mode')
315 315 user = User.get_by_username(username, case_insensitive=True)
316 316 else:
317 317 log.debug('provided username:`%s` is empty skipping...', username)
318 318 if not user:
319 319 log.debug('User `%s` not found in database', username)
320 320 return user
321 321
322 322 def user_activation_state(self):
323 323 """
324 324 Defines user activation state when creating new users
325 325
326 326 :returns: boolean
327 327 """
328 328 raise NotImplementedError("Not implemented in base class")
329 329
330 330 def auth(self, userobj, username, passwd, settings, **kwargs):
331 331 """
332 332 Given a user object (which may be null), username, a plaintext
333 333 password, and a settings object (containing all the keys needed as
334 334 listed in settings()), authenticate this user's login attempt.
335 335
336 336 Return None on failure. On success, return a dictionary of the form:
337 337
338 338 see: RhodeCodeAuthPluginBase.auth_func_attrs
339 339 This is later validated for correctness
340 340 """
341 341 raise NotImplementedError("not implemented in base class")
342 342
343 343 def _authenticate(self, userobj, username, passwd, settings, **kwargs):
344 344 """
345 345 Wrapper to call self.auth() that validates call on it
346 346
347 347 :param userobj: userobj
348 348 :param username: username
349 349 :param passwd: plaintext password
350 350 :param settings: plugin settings
351 351 """
352 352 auth = self.auth(userobj, username, passwd, settings, **kwargs)
353 353 if auth:
354 354 # check if hash should be migrated ?
355 355 new_hash = auth.get('_hash_migrate')
356 356 if new_hash:
357 357 self._migrate_hash_to_bcrypt(username, passwd, new_hash)
358 358 return self._validate_auth_return(auth)
359 359 return auth
360 360
361 361 def _migrate_hash_to_bcrypt(self, username, password, new_hash):
362 362 new_hash_cypher = _RhodeCodeCryptoBCrypt()
363 363 # extra checks, so make sure new hash is correct.
364 364 password_encoded = safe_str(password)
365 365 if new_hash and new_hash_cypher.hash_check(
366 366 password_encoded, new_hash):
367 367 cur_user = User.get_by_username(username)
368 368 cur_user.password = new_hash
369 369 Session().add(cur_user)
370 370 Session().flush()
371 371 log.info('Migrated user %s hash to bcrypt', cur_user)
372 372
373 373 def _validate_auth_return(self, ret):
374 374 if not isinstance(ret, dict):
375 375 raise Exception('returned value from auth must be a dict')
376 376 for k in self.auth_func_attrs:
377 377 if k not in ret:
378 378 raise Exception('Missing %s attribute from returned data' % k)
379 379 return ret
380 380
381 381
382 382 class RhodeCodeExternalAuthPlugin(RhodeCodeAuthPluginBase):
383 383
384 384 @hybrid_property
385 385 def allows_creating_users(self):
386 386 return True
387 387
388 388 def use_fake_password(self):
389 389 """
390 390 Return a boolean that indicates whether or not we should set the user's
391 391 password to a random value when it is authenticated by this plugin.
392 392 If your plugin provides authentication, then you will generally
393 393 want this.
394 394
395 395 :returns: boolean
396 396 """
397 397 raise NotImplementedError("Not implemented in base class")
398 398
399 399 def _authenticate(self, userobj, username, passwd, settings, **kwargs):
400 400 # at this point _authenticate calls plugin's `auth()` function
401 401 auth = super(RhodeCodeExternalAuthPlugin, self)._authenticate(
402 402 userobj, username, passwd, settings, **kwargs)
403 403 if auth:
404 404 # maybe plugin will clean the username ?
405 405 # we should use the return value
406 406 username = auth['username']
407 407
408 408 # if external source tells us that user is not active, we should
409 409 # skip rest of the process. This can prevent from creating users in
410 410 # RhodeCode when using external authentication, but if it's
411 411 # inactive user we shouldn't create that user anyway
412 412 if auth['active_from_extern'] is False:
413 413 log.warning(
414 414 "User %s authenticated against %s, but is inactive",
415 415 username, self.__module__)
416 416 return None
417 417
418 418 cur_user = User.get_by_username(username, case_insensitive=True)
419 419 is_user_existing = cur_user is not None
420 420
421 421 if is_user_existing:
422 422 log.debug('Syncing user `%s` from '
423 423 '`%s` plugin', username, self.name)
424 424 else:
425 425 log.debug('Creating non existing user `%s` from '
426 426 '`%s` plugin', username, self.name)
427 427
428 428 if self.allows_creating_users:
429 429 log.debug('Plugin `%s` allows to '
430 430 'create new users', self.name)
431 431 else:
432 432 log.debug('Plugin `%s` does not allow to '
433 433 'create new users', self.name)
434 434
435 435 user_parameters = {
436 436 'username': username,
437 437 'email': auth["email"],
438 438 'firstname': auth["firstname"],
439 439 'lastname': auth["lastname"],
440 440 'active': auth["active"],
441 441 'admin': auth["admin"],
442 442 'extern_name': auth["extern_name"],
443 443 'extern_type': self.name,
444 444 'plugin': self,
445 445 'allow_to_create_user': self.allows_creating_users,
446 446 }
447 447
448 448 if not is_user_existing:
449 449 if self.use_fake_password():
450 450 # Randomize the PW because we don't need it, but don't want
451 451 # them blank either
452 452 passwd = PasswordGenerator().gen_password(length=16)
453 453 user_parameters['password'] = passwd
454 454 else:
455 455 # Since the password is required by create_or_update method of
456 456 # UserModel, we need to set it explicitly.
457 457 # The create_or_update method is smart and recognises the
458 458 # password hashes as well.
459 459 user_parameters['password'] = cur_user.password
460 460
461 461 # we either create or update users, we also pass the flag
462 462 # that controls if this method can actually do that.
463 463 # raises NotAllowedToCreateUserError if it cannot, and we try to.
464 464 user = UserModel().create_or_update(**user_parameters)
465 465 Session().flush()
466 466 # enforce user is just in given groups, all of them has to be ones
467 467 # created from plugins. We store this info in _group_data JSON
468 468 # field
469 469 try:
470 470 groups = auth['groups'] or []
471 471 UserGroupModel().enforce_groups(user, groups, self.name)
472 472 except Exception:
473 473 # for any reason group syncing fails, we should
474 474 # proceed with login
475 475 log.error(traceback.format_exc())
476 476 Session().commit()
477 477 return auth
478 478
479 479
480 class AuthomaticBase(RhodeCodeExternalAuthPlugin):
481
482 # TODO: Think about how to create and store this secret string.
483 # We need the secret for the authomatic library. It needs to be the same
484 # across requests.
485 def _get_authomatic_secret(self, length=40):
486 secret = self.get_setting_by_name('secret')
487 if secret is None or secret == 'None' or secret == '':
488 from Crypto import Random, Hash
489 secret_bytes = Random.new().read(length)
490 secret_hash = Hash.SHA256.new()
491 secret_hash.update(secret_bytes)
492 secret = secret_hash.hexdigest()
493 self.create_or_update_setting('secret', secret)
494 Session.commit()
495 secret = self.get_setting_by_name('secret')
496 return secret
497
498 def get_authomatic(self):
499 scope = []
500 if self.name == 'bitbucket':
501 provider_class = oauth1.Bitbucket
502 scope = ['account', 'email', 'repository', 'issue', 'issue:write']
503 elif self.name == 'github':
504 provider_class = oauth2.GitHub
505 scope = ['repo', 'public_repo', 'user:email']
506 elif self.name == 'google':
507 provider_class = oauth2.Google
508 scope = ['profile', 'email']
509 elif self.name == 'twitter':
510 provider_class = oauth1.Twitter
511
512 authomatic_conf = {
513 self.name: {
514 'class_': provider_class,
515 'consumer_key': self.get_setting_by_name('consumer_key'),
516 'consumer_secret': self.get_setting_by_name('consumer_secret'),
517 'scope': scope,
518 'access_headers': {'User-Agent': 'TestAppAgent'},
519 }
520 }
521 secret = self._get_authomatic_secret()
522 return Authomatic(config=authomatic_conf,
523 secret=secret)
524
525 def get_provider_result(self, request):
526 """
527 Provides `authomatic.core.LoginResult` for provider and request
528
529 :param provider_name:
530 :param request:
531 :param config:
532 :return:
533 """
534 response = Response()
535 adapter = WebObAdapter(request, response)
536 authomatic_inst = self.get_authomatic()
537 return authomatic_inst.login(adapter, self.name), response
538
539 def handle_social_data(self, session, user_id, social_data):
540 """
541 Updates user tokens in database whenever necessary
542 :param request:
543 :param user:
544 :param social_data:
545 :return:
546 """
547 if not self.is_active():
548 h.flash(_('This provider is currently disabled'),
549 category='warning')
550 return False
551
552 social_data = social_data
553 update_identity = False
554
555 existing_row = ExternalIdentity.by_external_id_and_provider(
556 social_data['user']['id'],
557 social_data['credentials.provider']
558 )
559
560 if existing_row:
561 Session().delete(existing_row)
562 update_identity = True
563
564 if not existing_row or update_identity:
565 if not update_identity:
566 h.flash(_('Your external identity is now '
567 'connected with your account'), category='success')
568
569 if not social_data['user']['id']:
570 h.flash(_('No external user id found? Perhaps permissions'
571 'for authentication are set incorrectly'),
572 category='error')
573 return False
574
575 ex_identity = ExternalIdentity()
576 ex_identity.external_id = social_data['user']['id']
577 ex_identity.external_username = social_data['user']['user_name']
578 ex_identity.provider_name = social_data['credentials.provider']
579 ex_identity.access_token = social_data['credentials.token']
580 ex_identity.token_secret = social_data['credentials.token_secret']
581 ex_identity.alt_token = social_data['credentials.refresh_token']
582 ex_identity.local_user_id = user_id
583 Session().add(ex_identity)
584 session.pop('rhodecode.social_auth', None)
585 return ex_identity
586
587 def callback_url(self):
588 try:
589 return url('social_auth', provider_name=self.name, qualified=True)
590 except TypeError:
591 pass
592 return ''
593
594
595 480 def loadplugin(plugin_id):
596 481 """
597 482 Loads and returns an instantiated authentication plugin.
598 483 Returns the RhodeCodeAuthPluginBase subclass on success,
599 raises exceptions on failure.
600
601 raises:
602 KeyError -- if no plugin available with given name
603 TypeError -- if the RhodeCodeAuthPlugin is not a subclass of
604 ours RhodeCodeAuthPluginBase
484 or None on failure.
605 485 """
606 486 # TODO: Avoid using pyramid's thread locals to retrieve the registry.
607 487 authn_registry = get_current_registry().getUtility(IAuthnPluginRegistry)
608 488 plugin = authn_registry.get_plugin(plugin_id)
609 489 if plugin is None:
610 490 log.error('Authentication plugin not found: "%s"', plugin_id)
611 491 return plugin
612 492
613 493
614 494 def get_auth_cache_manager(custom_ttl=None):
615 495 return caches.get_cache_manager(
616 496 'auth_plugins', 'rhodecode.authentication', custom_ttl)
617 497
618 498
619 499 def authenticate(username, password, environ=None, auth_type=None,
620 500 skip_missing=False):
621 501 """
622 502 Authentication function used for access control,
623 503 It tries to authenticate based on enabled authentication modules.
624 504
625 :param username: username can be empty for container auth
626 :param password: password can be empty for container auth
627 :param environ: environ headers passed for container auth
505 :param username: username can be empty for headers auth
506 :param password: password can be empty for headers auth
507 :param environ: environ headers passed for headers auth
628 508 :param auth_type: type of authentication, either `HTTP_TYPE` or `VCS_TYPE`
629 509 :param skip_missing: ignores plugins that are in db but not in environment
630 510 :returns: None if auth failed, plugin_user dict if auth is correct
631 511 """
632 512 if not auth_type or auth_type not in [HTTP_TYPE, VCS_TYPE]:
633 513 raise ValueError('auth type must be one of http, vcs, got "%s" instead'
634 514 % auth_type)
635 container_only = environ and not (username and password)
636 auth_plugins = SettingsModel().get_auth_plugins()
637 for plugin_id in auth_plugins:
638 plugin = loadplugin(plugin_id)
515 headers_only = environ and not (username and password)
639 516
640 if plugin is None:
641 log.warning('Authentication plugin missing: "{}"'.format(
642 plugin_id))
643 continue
644
645 if not plugin.is_active():
646 log.info('Authentication plugin is inactive: "{}"'.format(
647 plugin_id))
648 continue
649
517 authn_registry = get_current_registry().getUtility(IAuthnPluginRegistry)
518 for plugin in authn_registry.get_plugins_for_authentication():
650 519 plugin.set_auth_type(auth_type)
651 520 user = plugin.get_user(username)
652 521 display_user = user.username if user else username
653 522
654 if container_only and not plugin.is_container_auth:
655 log.debug('Auth type is for container only and plugin `%s` is not '
656 'container plugin, skipping...', plugin_id)
523 if headers_only and not plugin.is_headers_auth:
524 log.debug('Auth type is for headers only and plugin `%s` is not '
525 'headers plugin, skipping...', plugin.get_id())
657 526 continue
658 527
659 528 # load plugin settings from RhodeCode database
660 529 plugin_settings = plugin.get_settings()
661 530 log.debug('Plugin settings:%s', plugin_settings)
662 531
663 log.debug('Trying authentication using ** %s **', plugin_id)
532 log.debug('Trying authentication using ** %s **', plugin.get_id())
664 533 # use plugin's method of user extraction.
665 534 user = plugin.get_user(username, environ=environ,
666 535 settings=plugin_settings)
667 536 display_user = user.username if user else username
668 log.debug('Plugin %s extracted user is `%s`', plugin_id, display_user)
537 log.debug(
538 'Plugin %s extracted user is `%s`', plugin.get_id(), display_user)
669 539
670 540 if not plugin.allows_authentication_from(user):
671 541 log.debug('Plugin %s does not accept user `%s` for authentication',
672 plugin_id, display_user)
542 plugin.get_id(), display_user)
673 543 continue
674 544 else:
675 545 log.debug('Plugin %s accepted user `%s` for authentication',
676 plugin_id, display_user)
546 plugin.get_id(), display_user)
677 547
678 548 log.info('Authenticating user `%s` using %s plugin',
679 display_user, plugin_id)
549 display_user, plugin.get_id())
680 550
681 551 _cache_ttl = 0
682 552
683 553 if isinstance(plugin.AUTH_CACHE_TTL, (int, long)):
684 554 # plugin cache set inside is more important than the settings value
685 555 _cache_ttl = plugin.AUTH_CACHE_TTL
686 556 elif plugin_settings.get('auth_cache_ttl'):
687 557 _cache_ttl = safe_int(plugin_settings.get('auth_cache_ttl'), 0)
688 558
689 559 plugin_cache_active = bool(_cache_ttl and _cache_ttl > 0)
690 560
691 561 # get instance of cache manager configured for a namespace
692 562 cache_manager = get_auth_cache_manager(custom_ttl=_cache_ttl)
693 563
694 log.debug('Cache for plugin `%s` active: %s', plugin_id,
564 log.debug('Cache for plugin `%s` active: %s', plugin.get_id(),
695 565 plugin_cache_active)
696 566
697 567 # for environ-based auth the password can be empty; the validation is
698 568 # then up to the server that fills in the env data needed for authentication
699 569 _password_hash = md5_safe(plugin.name + username + (password or ''))
700 570
701 571 # _authenticate is a wrapper for .auth() method of plugin.
702 572 # it checks if .auth() sends proper data.
703 573 # For RhodeCodeExternalAuthPlugin it also maps users to
704 574 # Database and maps the attributes returned from .auth()
705 575 # to RhodeCode database. If this function returns data
706 576 # then auth is correct.
707 577 start = time.time()
708 578 log.debug('Running plugin `%s` _authenticate method',
709 plugin_id)
579 plugin.get_id())
710 580
711 581 def auth_func():
712 582 """
713 583 This function is used internally by the Beaker cache to calculate
714 584 the result
715 585 """
716 586 return plugin._authenticate(
717 587 user, username, password, plugin_settings,
718 588 environ=environ or {})
719 589
720 590 if plugin_cache_active:
721 591 plugin_user = cache_manager.get(
722 592 _password_hash, createfunc=auth_func)
723 593 else:
724 594 plugin_user = auth_func()
725 595
726 596 auth_time = time.time() - start
727 597 log.debug('Authentication for plugin `%s` completed in %.3fs, '
728 598 'expiration time of fetched cache %.1fs.',
729 plugin_id, auth_time, _cache_ttl)
599 plugin.get_id(), auth_time, _cache_ttl)
730 600
731 601 log.debug('PLUGIN USER DATA: %s', plugin_user)
732 602
733 603 if plugin_user:
734 604 log.debug('Plugin returned proper authentication data')
735 605 return plugin_user
736 606 # we failed to authenticate because the .auth() method didn't return a proper user
737 607 log.debug("User `%s` failed to authenticate against %s",
738 display_user, plugin_id)
608 display_user, plugin.get_id())
739 609 return None
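
The cache TTL used in authenticate() above follows a simple precedence: a TTL set on the plugin class wins over the per-plugin auth_cache_ttl setting, and caching is only active for a positive value. A minimal standalone sketch of that resolution, where the function name and the int conversion are illustrative rather than part of the RhodeCode API:

    def resolve_cache_ttl(plugin_auth_cache_ttl, plugin_settings):
        """Return (ttl, cache_active) following the precedence used above."""
        ttl = 0
        if isinstance(plugin_auth_cache_ttl, (int, long)):
            # a TTL hard-coded on the plugin class beats the settings value
            ttl = plugin_auth_cache_ttl
        elif plugin_settings.get('auth_cache_ttl'):
            try:
                ttl = int(plugin_settings.get('auth_cache_ttl'))
            except (TypeError, ValueError):
                ttl = 0
        return ttl, bool(ttl and ttl > 0)

    print resolve_cache_ttl(4, {})                           # (4, True), e.g. the PAM plugin
    print resolve_cache_ttl(None, {'auth_cache_ttl': '30'})  # (30, True)
    print resolve_cache_ttl(None, {})                        # (0, False)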
@@ -1,276 +1,284 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 RhodeCode authentication plugin for Atlassian CROWD
23 23 """
24 24
25 25
26 26 import colander
27 27 import base64
28 28 import logging
29 29 import urllib2
30 30
31 31 from pylons.i18n.translation import lazy_ugettext as _
32 32 from sqlalchemy.ext.hybrid import hybrid_property
33 33
34 34 from rhodecode.authentication.base import RhodeCodeExternalAuthPlugin
35 35 from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
36 36 from rhodecode.authentication.routes import AuthnPluginResourceBase
37 from rhodecode.lib.colander_utils import strip_whitespace
37 38 from rhodecode.lib.ext_json import json, formatted_json
38 39 from rhodecode.model.db import User
39 40
40 41 log = logging.getLogger(__name__)
41 42
42 43
43 44 def plugin_factory(plugin_id, *args, **kwds):
44 45 """
45 46 Factory function that is called during plugin discovery.
46 47 It returns the plugin instance.
47 48 """
48 49 plugin = RhodeCodeAuthPlugin(plugin_id)
49 50 return plugin
50 51
51 52
52 53 class CrowdAuthnResource(AuthnPluginResourceBase):
53 54 pass
54 55
55 56
56 57 class CrowdSettingsSchema(AuthnPluginSettingsSchemaBase):
57 58 host = colander.SchemaNode(
58 59 colander.String(),
59 60 default='127.0.0.1',
60 61 description=_('The FQDN or IP of the Atlassian CROWD Server'),
62 preparer=strip_whitespace,
61 63 title=_('Host'),
62 64 widget='string')
63 65 port = colander.SchemaNode(
64 66 colander.Int(),
65 67 default=8095,
66 68 description=_('The Port in use by the Atlassian CROWD Server'),
69 preparer=strip_whitespace,
67 70 title=_('Port'),
68 71 validator=colander.Range(min=0, max=65536),
69 72 widget='int')
70 73 app_name = colander.SchemaNode(
71 74 colander.String(),
72 75 default='',
73 76 description=_('The Application Name to authenticate to CROWD'),
77 preparer=strip_whitespace,
74 78 title=_('Application Name'),
75 79 widget='string')
76 80 app_password = colander.SchemaNode(
77 81 colander.String(),
78 82 default='',
79 83 description=_('The password to authenticate to CROWD'),
84 preparer=strip_whitespace,
80 85 title=_('Application Password'),
81 86 widget='password')
82 87 admin_groups = colander.SchemaNode(
83 88 colander.String(),
84 89 default='',
85 90 description=_('A comma separated list of group names that identify '
86 91 'users as RhodeCode Administrators'),
87 92 missing='',
93 preparer=strip_whitespace,
88 94 title=_('Admin Groups'),
89 95 widget='string')
90 96
91 97
92 98 class CrowdServer(object):
93 99 def __init__(self, *args, **kwargs):
94 100 """
95 101 Create a new CrowdServer object that points to IP/Address 'host',
96 102 on the given port, and using the given method (https/http). user and
97 103 passwd can be set here or with set_credentials. If unspecified,
98 104 "version" defaults to "latest".
99 105
100 106 example::
101 107
102 108 cserver = CrowdServer(host="127.0.0.1",
103 109 port="8095",
104 110 user="some_app",
105 111 passwd="some_passwd",
106 112 version="1")
107 113 """
108 114 if "port" not in kwargs:
109 115 kwargs["port"] = "8095"
110 116 self._logger = kwargs.get("logger", logging.getLogger(__name__))
111 117 self._uri = "%s://%s:%s/crowd" % (kwargs.get("method", "http"),
112 118 kwargs.get("host", "127.0.0.1"),
113 119 kwargs.get("port", "8095"))
114 120 self.set_credentials(kwargs.get("user", ""),
115 121 kwargs.get("passwd", ""))
116 122 self._version = kwargs.get("version", "latest")
117 123 self._url_list = None
118 124 self._appname = "crowd"
119 125
120 126 def set_credentials(self, user, passwd):
121 127 self.user = user
122 128 self.passwd = passwd
123 129 self._make_opener()
124 130
125 131 def _make_opener(self):
126 132 mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
127 133 mgr.add_password(None, self._uri, self.user, self.passwd)
128 134 handler = urllib2.HTTPBasicAuthHandler(mgr)
129 135 self.opener = urllib2.build_opener(handler)
130 136
131 137 def _request(self, url, body=None, headers=None,
132 138 method=None, noformat=False,
133 139 empty_response_ok=False):
134 140 _headers = {"Content-type": "application/json",
135 141 "Accept": "application/json"}
136 142 if self.user and self.passwd:
137 143 authstring = base64.b64encode("%s:%s" % (self.user, self.passwd))
138 144 _headers["Authorization"] = "Basic %s" % authstring
139 145 if headers:
140 146 _headers.update(headers)
141 147 log.debug("Sent crowd: \n%s"
142 148 % (formatted_json({"url": url, "body": body,
143 149 "headers": _headers})))
144 150 request = urllib2.Request(url, body, _headers)
145 151 if method:
146 152 request.get_method = lambda: method
147 153
148 154 global msg
149 155 msg = ""
150 156 try:
151 157 rdoc = self.opener.open(request)
152 158 msg = "".join(rdoc.readlines())
153 159 if not msg and empty_response_ok:
154 160 rval = {}
155 161 rval["status"] = True
156 162 rval["error"] = "Response body was empty"
157 163 elif not noformat:
158 164 rval = json.loads(msg)
159 165 rval["status"] = True
160 166 else:
161 167 rval = "".join(rdoc.readlines())
162 168 except Exception as e:
163 169 if not noformat:
164 170 rval = {"status": False,
165 171 "body": body,
166 172 "error": str(e) + "\n" + msg}
167 173 else:
168 174 rval = None
169 175 return rval
170 176
171 177 def user_auth(self, username, password):
172 178 """Authenticate a user against crowd. Returns brief information about
173 179 the user."""
174 180 url = ("%s/rest/usermanagement/%s/authentication?username=%s"
175 181 % (self._uri, self._version, username))
176 182 body = json.dumps({"value": password})
177 183 return self._request(url, body)
178 184
179 185 def user_groups(self, username):
180 186 """Retrieve a list of groups to which this user belongs."""
181 187 url = ("%s/rest/usermanagement/%s/user/group/nested?username=%s"
182 188 % (self._uri, self._version, username))
183 189 return self._request(url)
184 190
185 191
186 192 class RhodeCodeAuthPlugin(RhodeCodeExternalAuthPlugin):
187 193
188 194 def includeme(self, config):
189 195 config.add_authn_plugin(self)
190 196 config.add_authn_resource(self.get_id(), CrowdAuthnResource(self))
191 197 config.add_view(
192 198 'rhodecode.authentication.views.AuthnPluginViewBase',
193 199 attr='settings_get',
200 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
194 201 request_method='GET',
195 202 route_name='auth_home',
196 203 context=CrowdAuthnResource)
197 204 config.add_view(
198 205 'rhodecode.authentication.views.AuthnPluginViewBase',
199 206 attr='settings_post',
207 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
200 208 request_method='POST',
201 209 route_name='auth_home',
202 210 context=CrowdAuthnResource)
203 211
204 212 def get_settings_schema(self):
205 213 return CrowdSettingsSchema()
206 214
207 215 def get_display_name(self):
208 216 return _('CROWD')
209 217
210 218 @hybrid_property
211 219 def name(self):
212 220 return "crowd"
213 221
214 222 def use_fake_password(self):
215 223 return True
216 224
217 225 def user_activation_state(self):
218 226 def_user_perms = User.get_default_user().AuthUser.permissions['global']
219 227 return 'hg.extern_activate.auto' in def_user_perms
220 228
221 229 def auth(self, userobj, username, password, settings, **kwargs):
222 230 """
223 231 Given a user object (which may be null), username, a plaintext password,
224 232 and a settings object (containing all the keys needed as listed in settings()),
225 233 authenticate this user's login attempt.
226 234
227 235 Return None on failure. On success, return a dictionary of the form:
228 236
229 237 see: RhodeCodeAuthPluginBase.auth_func_attrs
230 238 This is later validated for correctness
231 239 """
232 240 if not username or not password:
233 241 log.debug('Empty username or password skipping...')
234 242 return None
235 243
236 244 log.debug("Crowd settings: \n%s" % (formatted_json(settings)))
237 245 server = CrowdServer(**settings)
238 246 server.set_credentials(settings["app_name"], settings["app_password"])
239 247 crowd_user = server.user_auth(username, password)
240 248 log.debug("Crowd returned: \n%s" % (formatted_json(crowd_user)))
241 249 if not crowd_user["status"]:
242 250 return None
243 251
244 252 res = server.user_groups(crowd_user["name"])
245 253 log.debug("Crowd groups: \n%s" % (formatted_json(res)))
246 254 crowd_user["groups"] = [x["name"] for x in res["groups"]]
247 255
248 256 # old attrs fetched from RhodeCode database
249 257 admin = getattr(userobj, 'admin', False)
250 258 active = getattr(userobj, 'active', True)
251 259 email = getattr(userobj, 'email', '')
252 260 username = getattr(userobj, 'username', username)
253 261 firstname = getattr(userobj, 'firstname', '')
254 262 lastname = getattr(userobj, 'lastname', '')
255 263 extern_type = getattr(userobj, 'extern_type', '')
256 264
257 265 user_attrs = {
258 266 'username': username,
259 267 'firstname': crowd_user["first-name"] or firstname,
260 268 'lastname': crowd_user["last-name"] or lastname,
261 269 'groups': crowd_user["groups"],
262 270 'email': crowd_user["email"] or email,
263 271 'admin': admin,
264 272 'active': active,
265 273 'active_from_extern': crowd_user.get('active'),
266 274 'extern_name': crowd_user["name"],
267 275 'extern_type': extern_type,
268 276 }
269 277
270 278 # set an admin if we're in admin_groups of crowd
271 279 for group in settings["admin_groups"]:
272 280 if group in user_attrs["groups"]:
273 281 user_attrs["admin"] = True
274 282 log.debug("Final crowd user object: \n%s" % (formatted_json(user_attrs)))
275 283 log.info('user %s authenticated correctly' % user_attrs['username'])
276 284 return user_attrs
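
A hedged sketch of driving the CrowdServer helper above directly, mirroring the calls made in auth(); the import path is an assumption about this changeset's module layout, and a reachable Crowd server is required:

    # assumed module path; adjust to wherever this plugin file lives
    from rhodecode.authentication.plugins.auth_crowd import CrowdServer

    server = CrowdServer(host="127.0.0.1", port="8095", method="http")
    server.set_credentials("some_app", "some_passwd")

    crowd_user = server.user_auth("jdoe", "secret")
    if crowd_user.get("status"):
        groups = server.user_groups(crowd_user["name"])
        print [g["name"] for g in groups.get("groups", [])]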
@@ -1,163 +1,167 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 RhodeCode authentication plugin for Jasig CAS
23 23 http://www.jasig.org/cas
24 24 """
25 25
26 26
27 27 import colander
28 28 import logging
29 29 import rhodecode
30 30 import urllib
31 31 import urllib2
32 32
33 33 from pylons.i18n.translation import lazy_ugettext as _
34 34 from sqlalchemy.ext.hybrid import hybrid_property
35 35
36 36 from rhodecode.authentication.base import RhodeCodeExternalAuthPlugin
37 37 from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
38 38 from rhodecode.authentication.routes import AuthnPluginResourceBase
39 from rhodecode.lib.colander_utils import strip_whitespace
39 40 from rhodecode.lib.utils2 import safe_unicode
40 41 from rhodecode.model.db import User
41 42
42 43 log = logging.getLogger(__name__)
43 44
44 45
45 46 def plugin_factory(plugin_id, *args, **kwds):
46 47 """
47 48 Factory function that is called during plugin discovery.
48 49 It returns the plugin instance.
49 50 """
50 51 plugin = RhodeCodeAuthPlugin(plugin_id)
51 52 return plugin
52 53
53 54
54 55 class JasigCasAuthnResource(AuthnPluginResourceBase):
55 56 pass
56 57
57 58
58 59 class JasigCasSettingsSchema(AuthnPluginSettingsSchemaBase):
59 60 service_url = colander.SchemaNode(
60 61 colander.String(),
61 62 default='https://domain.com/cas/v1/tickets',
62 63 description=_('The url of the Jasig CAS REST service'),
64 preparer=strip_whitespace,
63 65 title=_('URL'),
64 66 widget='string')
65 67
66 68
67 69 class RhodeCodeAuthPlugin(RhodeCodeExternalAuthPlugin):
68 70
69 71 def includeme(self, config):
70 72 config.add_authn_plugin(self)
71 73 config.add_authn_resource(self.get_id(), JasigCasAuthnResource(self))
72 74 config.add_view(
73 75 'rhodecode.authentication.views.AuthnPluginViewBase',
74 76 attr='settings_get',
77 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
75 78 request_method='GET',
76 79 route_name='auth_home',
77 80 context=JasigCasAuthnResource)
78 81 config.add_view(
79 82 'rhodecode.authentication.views.AuthnPluginViewBase',
80 83 attr='settings_post',
84 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
81 85 request_method='POST',
82 86 route_name='auth_home',
83 87 context=JasigCasAuthnResource)
84 88
85 89 def get_settings_schema(self):
86 90 return JasigCasSettingsSchema()
87 91
88 92 def get_display_name(self):
89 93 return _('Jasig-CAS')
90 94
91 95 @hybrid_property
92 96 def name(self):
93 97 return "jasig-cas"
94 98
95 @hybrid_property
96 def is_container_auth(self):
99 @property
100 def is_headers_auth(self):
97 101 return True
98 102
99 103 def use_fake_password(self):
100 104 return True
101 105
102 106 def user_activation_state(self):
103 107 def_user_perms = User.get_default_user().AuthUser.permissions['global']
104 108 return 'hg.extern_activate.auto' in def_user_perms
105 109
106 110 def auth(self, userobj, username, password, settings, **kwargs):
107 111 """
108 112 Given a user object (which may be null), username, a plaintext password,
109 113 and a settings object (containing all the keys needed as listed in settings()),
110 114 authenticate this user's login attempt.
111 115
112 116 Return None on failure. On success, return a dictionary of the form:
113 117
114 118 see: RhodeCodeAuthPluginBase.auth_func_attrs
115 119 This is later validated for correctness
116 120 """
117 121 if not username or not password:
118 122 log.debug('Empty username or password skipping...')
119 123 return None
120 124
121 125 log.debug("Jasig CAS settings: %s", settings)
122 126 params = urllib.urlencode({'username': username, 'password': password})
123 127 headers = {"Content-type": "application/x-www-form-urlencoded",
124 128 "Accept": "text/plain",
125 129 "User-Agent": "RhodeCode-auth-%s" % rhodecode.__version__}
126 130 url = settings["service_url"]
127 131
128 132 log.debug("Sent Jasig CAS: \n%s",
129 133 {"url": url, "body": params, "headers": headers})
130 134 request = urllib2.Request(url, params, headers)
131 135 try:
132 136 response = urllib2.urlopen(request)
133 137 except urllib2.HTTPError as e:
134 138 log.debug("HTTPError when requesting Jasig CAS (status code: %d)" % e.code)
135 139 return None
136 140 except urllib2.URLError as e:
137 141 log.debug("URLError when requesting Jasig CAS url: %s " % url)
138 142 return None
139 143
140 144 # old attrs fetched from RhodeCode database
141 145 admin = getattr(userobj, 'admin', False)
142 146 active = getattr(userobj, 'active', True)
143 147 email = getattr(userobj, 'email', '')
144 148 username = getattr(userobj, 'username', username)
145 149 firstname = getattr(userobj, 'firstname', '')
146 150 lastname = getattr(userobj, 'lastname', '')
147 151 extern_type = getattr(userobj, 'extern_type', '')
148 152
149 153 user_attrs = {
150 154 'username': username,
151 155 'firstname': safe_unicode(firstname or username),
152 156 'lastname': safe_unicode(lastname or ''),
153 157 'groups': [],
154 158 'email': email or '',
155 159 'admin': admin or False,
156 160 'active': active,
157 161 'active_from_extern': True,
158 162 'extern_name': username,
159 163 'extern_type': extern_type,
160 164 }
161 165
162 166 log.info('user %s authenticated correctly' % user_attrs['username'])
163 167 return user_attrs
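
The CAS check in auth() reduces to one REST call: POST the credentials to the configured tickets endpoint and treat any non-error response as a successful login. A standalone sketch of that call, with an example service URL:

    import urllib
    import urllib2

    def cas_credentials_ok(service_url, username, password):
        # POST username/password to the Jasig CAS REST tickets endpoint
        params = urllib.urlencode({'username': username, 'password': password})
        headers = {"Content-type": "application/x-www-form-urlencoded",
                   "Accept": "text/plain"}
        try:
            urllib2.urlopen(urllib2.Request(service_url, params, headers))
            return True
        except (urllib2.HTTPError, urllib2.URLError):
            return False

    # cas_credentials_ok('https://domain.com/cas/v1/tickets', 'jdoe', 'secret')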
@@ -1,447 +1,461 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 RhodeCode authentication plugin for LDAP
23 23 """
24 24
25 25
26 26 import colander
27 27 import logging
28 28 import traceback
29 29
30 30 from pylons.i18n.translation import lazy_ugettext as _
31 31 from sqlalchemy.ext.hybrid import hybrid_property
32 32
33 33 from rhodecode.authentication.base import RhodeCodeExternalAuthPlugin
34 34 from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
35 35 from rhodecode.authentication.routes import AuthnPluginResourceBase
36 from rhodecode.lib.colander_utils import strip_whitespace
36 37 from rhodecode.lib.exceptions import (
37 38 LdapConnectionError, LdapUsernameError, LdapPasswordError, LdapImportError
38 39 )
39 40 from rhodecode.lib.utils2 import safe_unicode, safe_str
40 41 from rhodecode.model.db import User
41 42 from rhodecode.model.validators import Missing
42 43
43 44 log = logging.getLogger(__name__)
44 45
45 46 try:
46 47 import ldap
47 48 except ImportError:
48 # means that python-ldap is not installed
49 ldap = Missing()
49 # means that python-ldap is not installed; we use the Missing object to
50 # mark that the ldap library is missing
51 ldap = Missing
50 52
51 53
52 54 def plugin_factory(plugin_id, *args, **kwds):
53 55 """
54 56 Factory function that is called during plugin discovery.
55 57 It returns the plugin instance.
56 58 """
57 59 plugin = RhodeCodeAuthPlugin(plugin_id)
58 60 return plugin
59 61
60 62
61 63 class LdapAuthnResource(AuthnPluginResourceBase):
62 64 pass
63 65
64 66
65 67 class LdapSettingsSchema(AuthnPluginSettingsSchemaBase):
66 68 tls_kind_choices = ['PLAIN', 'LDAPS', 'START_TLS']
67 69 tls_reqcert_choices = ['NEVER', 'ALLOW', 'TRY', 'DEMAND', 'HARD']
68 70 search_scope_choices = ['BASE', 'ONELEVEL', 'SUBTREE']
69 71
70 72 host = colander.SchemaNode(
71 73 colander.String(),
72 74 default='',
73 75 description=_('Host of the LDAP Server'),
76 preparer=strip_whitespace,
74 77 title=_('LDAP Host'),
75 78 widget='string')
76 79 port = colander.SchemaNode(
77 80 colander.Int(),
78 81 default=389,
79 82 description=_('Port that the LDAP server is listening on'),
83 preparer=strip_whitespace,
80 84 title=_('Port'),
81 85 validator=colander.Range(min=0, max=65536),
82 86 widget='int')
83 87 dn_user = colander.SchemaNode(
84 88 colander.String(),
85 89 default='',
86 90 description=_('User to connect to LDAP'),
87 91 missing='',
92 preparer=strip_whitespace,
88 93 title=_('Account'),
89 94 widget='string')
90 95 dn_pass = colander.SchemaNode(
91 96 colander.String(),
92 97 default='',
93 98 description=_('Password to connect to LDAP'),
94 99 missing='',
100 preparer=strip_whitespace,
95 101 title=_('Password'),
96 102 widget='password')
97 103 tls_kind = colander.SchemaNode(
98 104 colander.String(),
99 105 default=tls_kind_choices[0],
100 106 description=_('TLS Type'),
101 107 title=_('Connection Security'),
102 108 validator=colander.OneOf(tls_kind_choices),
103 109 widget='select')
104 110 tls_reqcert = colander.SchemaNode(
105 111 colander.String(),
106 112 default=tls_reqcert_choices[0],
107 113 description=_('Require Cert over TLS?'),
108 114 title=_('Certificate Checks'),
109 115 validator=colander.OneOf(tls_reqcert_choices),
110 116 widget='select')
111 117 base_dn = colander.SchemaNode(
112 118 colander.String(),
113 119 default='',
114 120 description=_('Base DN to search (e.g., dc=mydomain,dc=com)'),
115 121 missing='',
122 preparer=strip_whitespace,
116 123 title=_('Base DN'),
117 124 widget='string')
118 125 filter = colander.SchemaNode(
119 126 colander.String(),
120 127 default='',
121 128 description=_('Filter to narrow results (e.g., ou=Users, etc)'),
122 129 missing='',
130 preparer=strip_whitespace,
123 131 title=_('LDAP Search Filter'),
124 132 widget='string')
125 133 search_scope = colander.SchemaNode(
126 134 colander.String(),
127 135 default=search_scope_choices[0],
128 136 description=_('How deep to search LDAP'),
129 137 title=_('LDAP Search Scope'),
130 138 validator=colander.OneOf(search_scope_choices),
131 139 widget='select')
132 140 attr_login = colander.SchemaNode(
133 141 colander.String(),
134 142 default='',
135 143 description=_('LDAP Attribute to map to user name'),
144 missing_msg=_('The LDAP Login attribute of the CN must be specified'),
145 preparer=strip_whitespace,
136 146 title=_('Login Attribute'),
137 missing_msg=_('The LDAP Login attribute of the CN must be specified'),
138 147 widget='string')
139 148 attr_firstname = colander.SchemaNode(
140 149 colander.String(),
141 150 default='',
142 151 description=_('LDAP Attribute to map to first name'),
143 152 missing='',
153 preparer=strip_whitespace,
144 154 title=_('First Name Attribute'),
145 155 widget='string')
146 156 attr_lastname = colander.SchemaNode(
147 157 colander.String(),
148 158 default='',
149 159 description=_('LDAP Attribute to map to last name'),
150 160 missing='',
161 preparer=strip_whitespace,
151 162 title=_('Last Name Attribute'),
152 163 widget='string')
153 164 attr_email = colander.SchemaNode(
154 165 colander.String(),
155 166 default='',
156 167 description=_('LDAP Attribute to map to email address'),
157 168 missing='',
169 preparer=strip_whitespace,
158 170 title=_('Email Attribute'),
159 171 widget='string')
160 172
161 173
162 174 class AuthLdap(object):
163 175
164 176 def _build_servers(self):
165 177 return ', '.join(
166 178 ["{}://{}:{}".format(
167 179 self.ldap_server_type, host.strip(), self.LDAP_SERVER_PORT)
168 180 for host in self.SERVER_ADDRESSES])
169 181
170 182 def __init__(self, server, base_dn, port=389, bind_dn='', bind_pass='',
171 183 tls_kind='PLAIN', tls_reqcert='DEMAND', ldap_version=3,
172 184 search_scope='SUBTREE', attr_login='uid',
173 185 ldap_filter='(&(objectClass=user)(!(objectClass=computer)))'):
174 if isinstance(ldap, Missing):
186 if ldap == Missing:
175 187 raise LdapImportError("Missing or incompatible ldap library")
176 188
177 189 self.ldap_version = ldap_version
178 190 self.ldap_server_type = 'ldap'
179 191
180 192 self.TLS_KIND = tls_kind
181 193
182 194 if self.TLS_KIND == 'LDAPS':
183 195 port = port or 636
184 196 self.ldap_server_type += 's'
185 197
186 198 OPT_X_TLS_DEMAND = 2
187 199 self.TLS_REQCERT = getattr(ldap, 'OPT_X_TLS_%s' % tls_reqcert,
188 200 OPT_X_TLS_DEMAND)
189 201 # split server into list
190 202 self.SERVER_ADDRESSES = server.split(',')
191 203 self.LDAP_SERVER_PORT = port
192 204
193 205 # USE FOR READ ONLY BIND TO LDAP SERVER
194 206 self.attr_login = attr_login
195 207
196 208 self.LDAP_BIND_DN = safe_str(bind_dn)
197 209 self.LDAP_BIND_PASS = safe_str(bind_pass)
198 210 self.LDAP_SERVER = self._build_servers()
199 211 self.SEARCH_SCOPE = getattr(ldap, 'SCOPE_%s' % search_scope)
200 212 self.BASE_DN = safe_str(base_dn)
201 213 self.LDAP_FILTER = safe_str(ldap_filter)
202 214
203 215 def _get_ldap_server(self):
204 216 if hasattr(ldap, 'OPT_X_TLS_CACERTDIR'):
205 217 ldap.set_option(ldap.OPT_X_TLS_CACERTDIR,
206 218 '/etc/openldap/cacerts')
207 219 ldap.set_option(ldap.OPT_REFERRALS, ldap.OPT_OFF)
208 220 ldap.set_option(ldap.OPT_RESTART, ldap.OPT_ON)
209 221 ldap.set_option(ldap.OPT_TIMEOUT, 20)
210 222 ldap.set_option(ldap.OPT_NETWORK_TIMEOUT, 10)
211 223 ldap.set_option(ldap.OPT_TIMELIMIT, 15)
212 224 if self.TLS_KIND != 'PLAIN':
213 225 ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, self.TLS_REQCERT)
214 226 server = ldap.initialize(self.LDAP_SERVER)
215 227 if self.ldap_version == 2:
216 228 server.protocol = ldap.VERSION2
217 229 else:
218 230 server.protocol = ldap.VERSION3
219 231
220 232 if self.TLS_KIND == 'START_TLS':
221 233 server.start_tls_s()
222 234
223 235 if self.LDAP_BIND_DN and self.LDAP_BIND_PASS:
224 236 log.debug('Trying simple_bind with password and given DN: %s',
225 237 self.LDAP_BIND_DN)
226 238 server.simple_bind_s(self.LDAP_BIND_DN, self.LDAP_BIND_PASS)
227 239
228 240 return server
229 241
230 242 def get_uid(self, username):
231 243 from rhodecode.lib.helpers import chop_at
232 244 uid = username
233 245 for server_addr in self.SERVER_ADDRESSES:
234 246 uid = chop_at(username, "@%s" % server_addr)
235 247 return uid
236 248
237 249 def fetch_attrs_from_simple_bind(self, server, dn, username, password):
238 250 try:
239 251 log.debug('Trying simple bind with %s', dn)
240 252 server.simple_bind_s(dn, safe_str(password))
241 253 user = server.search_ext_s(
242 254 dn, ldap.SCOPE_BASE, '(objectClass=*)', )[0]
243 255 _, attrs = user
244 256 return attrs
245 257
246 258 except ldap.INVALID_CREDENTIALS:
247 259 log.debug(
248 260 "LDAP rejected password for user '%s': %s, org_exc:",
249 261 username, dn, exc_info=True)
250 262
251 263 def authenticate_ldap(self, username, password):
252 264 """
253 265 Authenticate a user via LDAP and return his/her LDAP properties.
254 266
255 267 Raises AuthenticationError if the credentials are rejected, or
256 268 EnvironmentError if the LDAP server can't be reached.
257 269
258 270 :param username: username
259 271 :param password: password
260 272 """
261 273
262 274 uid = self.get_uid(username)
263 275
264 276 if not password:
265 277 msg = "Authenticating user %s with blank password not allowed"
266 278 log.warning(msg, username)
267 279 raise LdapPasswordError(msg)
268 280 if "," in username:
269 281 raise LdapUsernameError("invalid character in username: ,")
270 282 try:
271 283 server = self._get_ldap_server()
272 284 filter_ = '(&%s(%s=%s))' % (
273 285 self.LDAP_FILTER, self.attr_login, username)
274 286 log.debug("Authenticating %r filter %s at %s", self.BASE_DN,
275 287 filter_, self.LDAP_SERVER)
276 288 lobjects = server.search_ext_s(
277 289 self.BASE_DN, self.SEARCH_SCOPE, filter_)
278 290
279 291 if not lobjects:
280 292 raise ldap.NO_SUCH_OBJECT()
281 293
282 294 for (dn, _attrs) in lobjects:
283 295 if dn is None:
284 296 continue
285 297
286 298 user_attrs = self.fetch_attrs_from_simple_bind(
287 299 server, dn, username, password)
288 300 if user_attrs:
289 301 break
290 302
291 303 else:
292 304 log.debug("No matching LDAP objects for authentication "
293 305 "of '%s' (%s)", uid, username)
294 306 raise LdapPasswordError('Failed to authenticate user '
295 307 'with given password')
296 308
297 309 except ldap.NO_SUCH_OBJECT:
298 310 log.debug("LDAP says no such user '%s' (%s), org_exc:",
299 311 uid, username, exc_info=True)
300 312 raise LdapUsernameError()
301 313 except ldap.SERVER_DOWN:
302 314 org_exc = traceback.format_exc()
303 315 raise LdapConnectionError(
304 316 "LDAP can't access authentication "
305 317 "server, org_exc:%s" % org_exc)
306 318
307 319 return dn, user_attrs
308 320
309 321
310 322 class RhodeCodeAuthPlugin(RhodeCodeExternalAuthPlugin):
311 323 # marker used to define dynamic binding in the bind_dn setting
312 324 DYNAMIC_BIND_VAR = '$login'
313 325
314 326 def includeme(self, config):
315 327 config.add_authn_plugin(self)
316 328 config.add_authn_resource(self.get_id(), LdapAuthnResource(self))
317 329 config.add_view(
318 330 'rhodecode.authentication.views.AuthnPluginViewBase',
319 331 attr='settings_get',
332 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
320 333 request_method='GET',
321 334 route_name='auth_home',
322 335 context=LdapAuthnResource)
323 336 config.add_view(
324 337 'rhodecode.authentication.views.AuthnPluginViewBase',
325 338 attr='settings_post',
339 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
326 340 request_method='POST',
327 341 route_name='auth_home',
328 342 context=LdapAuthnResource)
329 343
330 344 def get_settings_schema(self):
331 345 return LdapSettingsSchema()
332 346
333 347 def get_display_name(self):
334 348 return _('LDAP')
335 349
336 350 @hybrid_property
337 351 def name(self):
338 352 return "ldap"
339 353
340 354 def use_fake_password(self):
341 355 return True
342 356
343 357 def user_activation_state(self):
344 358 def_user_perms = User.get_default_user().AuthUser.permissions['global']
345 359 return 'hg.extern_activate.auto' in def_user_perms
346 360
347 361 def try_dynamic_binding(self, username, password, current_args):
348 362 """
349 363 Detects marker inside our original bind, and uses dynamic auth if
350 364 present
351 365 """
352 366
353 367 org_bind = current_args['bind_dn']
354 368 passwd = current_args['bind_pass']
355 369
356 370 def has_bind_marker(username):
357 371 if self.DYNAMIC_BIND_VAR in username:
358 372 return True
359 373
360 374 # only use dynamic binding if the bind DN contains the "special" variable
361 375 if org_bind and has_bind_marker(org_bind) and not passwd:
362 376 log.debug('Using dynamic user/password binding for ldap '
363 377 'authentication. Replacing `%s` with username',
364 378 self.DYNAMIC_BIND_VAR)
365 379 current_args['bind_dn'] = org_bind.replace(
366 380 self.DYNAMIC_BIND_VAR, username)
367 381 current_args['bind_pass'] = password
368 382
369 383 return current_args
370 384
371 385 def auth(self, userobj, username, password, settings, **kwargs):
372 386 """
373 387 Given a user object (which may be null), username, a plaintext password,
374 388 and a settings object (containing all the keys needed as listed in
375 389 settings()), authenticate this user's login attempt.
376 390
377 391 Return None on failure. On success, return a dictionary of the form:
378 392
379 393 see: RhodeCodeAuthPluginBase.auth_func_attrs
380 394 This is later validated for correctness
381 395 """
382 396
383 397 if not username or not password:
384 398 log.debug('Empty username or password skipping...')
385 399 return None
386 400
387 401 ldap_args = {
388 402 'server': settings.get('host', ''),
389 403 'base_dn': settings.get('base_dn', ''),
390 404 'port': settings.get('port'),
391 405 'bind_dn': settings.get('dn_user'),
392 406 'bind_pass': settings.get('dn_pass'),
393 407 'tls_kind': settings.get('tls_kind'),
394 408 'tls_reqcert': settings.get('tls_reqcert'),
395 409 'search_scope': settings.get('search_scope'),
396 410 'attr_login': settings.get('attr_login'),
397 411 'ldap_version': 3,
398 412 'ldap_filter': settings.get('filter'),
399 413 }
400 414
401 415 ldap_attrs = self.try_dynamic_binding(username, password, ldap_args)
402 416
403 417 log.debug('Checking for ldap authentication.')
404 418
405 419 try:
406 420 aldap = AuthLdap(**ldap_args)
407 421 (user_dn, ldap_attrs) = aldap.authenticate_ldap(username, password)
408 422 log.debug('Got ldap DN response %s', user_dn)
409 423
410 424 def get_ldap_attr(k):
411 425 return ldap_attrs.get(settings.get(k), [''])[0]
412 426
413 427 # old attrs fetched from RhodeCode database
414 428 admin = getattr(userobj, 'admin', False)
415 429 active = getattr(userobj, 'active', True)
416 430 email = getattr(userobj, 'email', '')
417 431 username = getattr(userobj, 'username', username)
418 432 firstname = getattr(userobj, 'firstname', '')
419 433 lastname = getattr(userobj, 'lastname', '')
420 434 extern_type = getattr(userobj, 'extern_type', '')
421 435
422 436 groups = []
423 437 user_attrs = {
424 438 'username': username,
425 439 'firstname': safe_unicode(
426 440 get_ldap_attr('attr_firstname') or firstname),
427 441 'lastname': safe_unicode(
428 442 get_ldap_attr('attr_lastname') or lastname),
429 443 'groups': groups,
430 444 'email': get_ldap_attr('attr_email') or email,
431 445 'admin': admin,
432 446 'active': active,
433 447 "active_from_extern": None,
434 448 'extern_name': user_dn,
435 449 'extern_type': extern_type,
436 450 }
437 451 log.debug('ldap user: %s', user_attrs)
438 452 log.info('user %s authenticated correctly', user_attrs['username'])
439 453
440 454 return user_attrs
441 455
442 456 except (LdapUsernameError, LdapPasswordError, LdapImportError):
443 457 log.exception("LDAP related exception")
444 458 return None
445 459 except (Exception,):
446 460 log.exception("Other exception")
447 461 return None
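
try_dynamic_binding() above enables per-user LDAP binds: when the configured bind DN contains the $login marker and no bind password is set, the marker is replaced with the authenticating username and the user's own password is used for the bind. A standalone sketch of that substitution:

    DYNAMIC_BIND_VAR = '$login'

    def resolve_bind(bind_dn, bind_pass, username, password):
        if bind_dn and DYNAMIC_BIND_VAR in bind_dn and not bind_pass:
            return bind_dn.replace(DYNAMIC_BIND_VAR, username), password
        return bind_dn, bind_pass

    print resolve_bind('uid=$login,ou=users,dc=example,dc=com', '', 'jdoe', 'secret')
    # -> ('uid=jdoe,ou=users,dc=example,dc=com', 'secret')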
@@ -1,155 +1,160 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 """
21 21 RhodeCode authentication library for PAM
22 22 """
23 23
24 24 import colander
25 25 import grp
26 26 import logging
27 27 import pam
28 28 import pwd
29 29 import re
30 30 import socket
31 31
32 32 from pylons.i18n.translation import lazy_ugettext as _
33 33 from sqlalchemy.ext.hybrid import hybrid_property
34 34
35 35 from rhodecode.authentication.base import RhodeCodeExternalAuthPlugin
36 36 from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
37 37 from rhodecode.authentication.routes import AuthnPluginResourceBase
38 from rhodecode.lib.colander_utils import strip_whitespace
38 39
39 40 log = logging.getLogger(__name__)
40 41
41 42
42 43 def plugin_factory(plugin_id, *args, **kwds):
43 44 """
44 45 Factory function that is called during plugin discovery.
45 46 It returns the plugin instance.
46 47 """
47 48 plugin = RhodeCodeAuthPlugin(plugin_id)
48 49 return plugin
49 50
50 51
51 52 class PamAuthnResource(AuthnPluginResourceBase):
52 53 pass
53 54
54 55
55 56 class PamSettingsSchema(AuthnPluginSettingsSchemaBase):
56 57 service = colander.SchemaNode(
57 58 colander.String(),
58 59 default='login',
59 60 description=_('PAM service name to use for authentication.'),
61 preparer=strip_whitespace,
60 62 title=_('PAM service name'),
61 63 widget='string')
62 64 gecos = colander.SchemaNode(
63 65 colander.String(),
64 66 default='(?P<last_name>.+),\s*(?P<first_name>\w+)',
65 67 description=_('Regular expression for extracting user name/email etc. '
66 68 'from Unix userinfo.'),
69 preparer=strip_whitespace,
67 70 title=_('Gecos Regex'),
68 71 widget='string')
69 72
70 73
71 74 class RhodeCodeAuthPlugin(RhodeCodeExternalAuthPlugin):
72 75 # PAM authentication can be slow. Repository operations involve a lot of
73 76 # auth calls. A little caching helps speed up push/pull operations significantly
74 77 AUTH_CACHE_TTL = 4
75 78
76 79 def includeme(self, config):
77 80 config.add_authn_plugin(self)
78 81 config.add_authn_resource(self.get_id(), PamAuthnResource(self))
79 82 config.add_view(
80 83 'rhodecode.authentication.views.AuthnPluginViewBase',
81 84 attr='settings_get',
85 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
82 86 request_method='GET',
83 87 route_name='auth_home',
84 88 context=PamAuthnResource)
85 89 config.add_view(
86 90 'rhodecode.authentication.views.AuthnPluginViewBase',
87 91 attr='settings_post',
92 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
88 93 request_method='POST',
89 94 route_name='auth_home',
90 95 context=PamAuthnResource)
91 96
92 97 def get_display_name(self):
93 98 return _('PAM')
94 99
95 100 @hybrid_property
96 101 def name(self):
97 102 return "pam"
98 103
99 104 def get_settings_schema(self):
100 105 return PamSettingsSchema()
101 106
102 107 def use_fake_password(self):
103 108 return True
104 109
105 110 def auth(self, userobj, username, password, settings, **kwargs):
106 111 if not username or not password:
107 112 log.debug('Empty username or password skipping...')
108 113 return None
109 114
110 115 auth_result = pam.authenticate(username, password, settings["service"])
111 116
112 117 if not auth_result:
113 118 log.error("PAM was unable to authenticate user: %s" % (username, ))
114 119 return None
115 120
116 121 log.debug('Got PAM response %s' % (auth_result, ))
117 122
118 123 # old attrs fetched from RhodeCode database
119 124 default_email = "%s@%s" % (username, socket.gethostname())
120 125 admin = getattr(userobj, 'admin', False)
121 126 active = getattr(userobj, 'active', True)
122 127 email = getattr(userobj, 'email', '') or default_email
123 128 username = getattr(userobj, 'username', username)
124 129 firstname = getattr(userobj, 'firstname', '')
125 130 lastname = getattr(userobj, 'lastname', '')
126 131 extern_type = getattr(userobj, 'extern_type', '')
127 132
128 133 user_attrs = {
129 134 'username': username,
130 135 'firstname': firstname,
131 136 'lastname': lastname,
132 137 'groups': [g.gr_name for g in grp.getgrall()
133 138 if username in g.gr_mem],
134 139 'email': email,
135 140 'admin': admin,
136 141 'active': active,
137 142 'active_from_extern': None,
138 143 'extern_name': username,
139 144 'extern_type': extern_type,
140 145 }
141 146
142 147 try:
143 148 user_data = pwd.getpwnam(username)
144 149 regex = settings["gecos"]
145 150 match = re.search(regex, user_data.pw_gecos)
146 151 if match:
147 152 user_attrs["firstname"] = match.group('first_name')
148 153 user_attrs["lastname"] = match.group('last_name')
149 154 except Exception:
150 155 log.warning("Cannot extract additional info for PAM user")
151 156 pass
152 157
153 158 log.debug("pamuser: %s", user_attrs)
154 159 log.info('user %s authenticated correctly' % user_attrs['username'])
155 160 return user_attrs
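
The gecos handling at the end of the PAM auth() expects a "Lastname, Firstname" value in the Unix gecos field; a standalone sketch of the default regex in action:

    import re

    gecos_regex = '(?P<last_name>.+),\s*(?P<first_name>\w+)'
    match = re.search(gecos_regex, 'Doe, John')
    if match:
        print match.group('first_name'), match.group('last_name')  # John Doe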
@@ -1,139 +1,141 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 RhodeCode authentication plugin for built in internal auth
23 23 """
24 24
25 25 import logging
26 26
27 27 from pylons.i18n.translation import lazy_ugettext as _
28 28 from sqlalchemy.ext.hybrid import hybrid_property
29 29
30 30 from rhodecode.authentication.base import RhodeCodeAuthPluginBase
31 31 from rhodecode.authentication.routes import AuthnPluginResourceBase
32 32 from rhodecode.lib.utils2 import safe_str
33 33 from rhodecode.model.db import User
34 34
35 35 log = logging.getLogger(__name__)
36 36
37 37
38 38 def plugin_factory(plugin_id, *args, **kwds):
39 39 plugin = RhodeCodeAuthPlugin(plugin_id)
40 40 return plugin
41 41
42 42
43 43 class RhodecodeAuthnResource(AuthnPluginResourceBase):
44 44 pass
45 45
46 46
47 47 class RhodeCodeAuthPlugin(RhodeCodeAuthPluginBase):
48 48
49 49 def includeme(self, config):
50 50 config.add_authn_plugin(self)
51 51 config.add_authn_resource(self.get_id(), RhodecodeAuthnResource(self))
52 52 config.add_view(
53 53 'rhodecode.authentication.views.AuthnPluginViewBase',
54 54 attr='settings_get',
55 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
55 56 request_method='GET',
56 57 route_name='auth_home',
57 58 context=RhodecodeAuthnResource)
58 59 config.add_view(
59 60 'rhodecode.authentication.views.AuthnPluginViewBase',
60 61 attr='settings_post',
62 renderer='rhodecode:templates/admin/auth/plugin_settings.html',
61 63 request_method='POST',
62 64 route_name='auth_home',
63 65 context=RhodecodeAuthnResource)
64 66
65 67 def get_display_name(self):
66 68 return _('Rhodecode')
67 69
68 70 @hybrid_property
69 71 def name(self):
70 72 return "rhodecode"
71 73
72 74 def user_activation_state(self):
73 75 def_user_perms = User.get_default_user().AuthUser.permissions['global']
74 76 return 'hg.register.auto_activate' in def_user_perms
75 77
76 78 def allows_authentication_from(
77 79 self, user, allows_non_existing_user=True,
78 80 allowed_auth_plugins=None, allowed_auth_sources=None):
79 81 """
80 82 Custom method for this auth that doesn't accept non-existing users.
81 83 We know that user exists in our database.
82 84 """
83 85 allows_non_existing_user = False
84 86 return super(RhodeCodeAuthPlugin, self).allows_authentication_from(
85 87 user, allows_non_existing_user=allows_non_existing_user)
86 88
87 89 def auth(self, userobj, username, password, settings, **kwargs):
88 90 if not userobj:
89 91 log.debug('userobj was:%s skipping' % (userobj, ))
90 92 return None
91 93 if userobj.extern_type != self.name:
92 94 log.warning(
93 95 "userobj:%s extern_type mismatch got:`%s` expected:`%s`" %
94 96 (userobj, userobj.extern_type, self.name))
95 97 return None
96 98
97 99 user_attrs = {
98 100 "username": userobj.username,
99 101 "firstname": userobj.firstname,
100 102 "lastname": userobj.lastname,
101 103 "groups": [],
102 104 "email": userobj.email,
103 105 "admin": userobj.admin,
104 106 "active": userobj.active,
105 107 "active_from_extern": userobj.active,
106 108 "extern_name": userobj.user_id,
107 109 "extern_type": userobj.extern_type,
108 110 }
109 111
110 112 log.debug("User attributes:%s" % (user_attrs, ))
111 113 if userobj.active:
112 114 from rhodecode.lib import auth
113 115 crypto_backend = auth.crypto_backend()
114 116 password_encoded = safe_str(password)
115 117 password_match, new_hash = crypto_backend.hash_check_with_upgrade(
116 118 password_encoded, userobj.password)
117 119
118 120 if password_match and new_hash:
119 121 log.debug('user %s properly authenticated, but '
120 122 'requires hash change to bcrypt', userobj)
121 123 # if password match, and we use OLD deprecated hash,
122 124 # we should migrate this user hash password to the new hash
123 125 # we store the new returned by hash_check_with_upgrade function
124 126 user_attrs['_hash_migrate'] = new_hash
125 127
126 128 if userobj.username == User.DEFAULT_USER and userobj.active:
127 129 log.info(
128 130 'user %s authenticated correctly as anonymous user', userobj)
129 131 return user_attrs
130 132
131 133 elif userobj.username == username and password_match:
132 134 log.info('user %s authenticated correctly', userobj)
133 135 return user_attrs
134 136 log.info("user %s had a bad password when "
135 137 "authenticating on this plugin", userobj)
136 138 return None
137 139 else:
138 140 log.warning('user %s tried auth but is disabled', userobj)
139 141 return None
@@ -1,53 +1,87 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 from pyramid.exceptions import ConfigurationError
24 24 from zope.interface import implementer
25 25
26 26 from rhodecode.authentication.interface import IAuthnPluginRegistry
27 27 from rhodecode.lib.utils2 import safe_str
28 from rhodecode.model.settings import SettingsModel
28 29
29 30 log = logging.getLogger(__name__)
30 31
31 32
32 33 @implementer(IAuthnPluginRegistry)
33 34 class AuthenticationPluginRegistry(object):
34 def __init__(self):
35
36 # INI settings key to set a fallback authentication plugin.
37 fallback_plugin_key = 'rhodecode.auth_plugin_fallback'
38
39 def __init__(self, settings):
35 40 self._plugins = {}
41 self._fallback_plugin = settings.get(self.fallback_plugin_key, None)
36 42
37 43 def add_authn_plugin(self, config, plugin):
38 44 plugin_id = plugin.get_id()
39 45 if plugin_id in self._plugins.keys():
40 46 raise ConfigurationError(
41 47 'Cannot register authentication plugin twice: "%s"', plugin_id)
42 48 else:
43 49 log.debug('Register authentication plugin: "%s"', plugin_id)
44 50 self._plugins[plugin_id] = plugin
45 51
46 52 def get_plugins(self):
47 53 def sort_key(plugin):
48 54 return str.lower(safe_str(plugin.get_display_name()))
49 55
50 56 return sorted(self._plugins.values(), key=sort_key)
51 57
52 58 def get_plugin(self, plugin_id):
53 59 return self._plugins.get(plugin_id, None)
60
61 def get_plugins_for_authentication(self):
62 """
63 Returns a list of plugins which should be consulted when authenticating
64 a user. It only returns plugins which are enabled and active.
65 Additionally it includes the fallback plugin from the INI file, if
66 `rhodecode.auth_plugin_fallback` is set to a plugin ID.
67 """
68 plugins = []
69
70 # Add all enabled and active plugins to the list. We iterate over the
71 # auth_plugins setting from DB beacuse it also represents the ordering.
72 enabled_plugins = SettingsModel().get_auth_plugins()
73 for plugin_id in enabled_plugins:
74 plugin = self.get_plugin(plugin_id)
75 if plugin is not None and plugin.is_active():
76 plugins.append(plugin)
77
78 # Add the fallback plugin from ini file.
79 if self._fallback_plugin:
80 log.warn(
81 'Using fallback authentication plugin from INI file: "%s"',
82 self._fallback_plugin)
83 plugin = self.get_plugin(self._fallback_plugin)
84 if plugin is not None and plugin not in plugins:
85 plugins.append(plugin)
86
87 return plugins
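
The registry is now constructed with the application settings so it can read the rhodecode.auth_plugin_fallback INI key described in get_plugins_for_authentication(). A hedged sketch of wiring it up, where the plugin id is only an example value:

    # illustrative only; the plugin id below is an example value
    settings = {
        'rhodecode.auth_plugin_fallback': 'egg:rhodecode-enterprise-ce#rhodecode',
    }
    registry = AuthenticationPluginRegistry(settings)
    # enabled plugins are later consulted in DB order, with the fallback
    # plugin appended at the end:
    # registry.get_plugins_for_authentication()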
@@ -1,151 +1,150 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 from pyramid.exceptions import ConfigurationError
24 from pyramid.i18n import TranslationStringFactory
25 24
26 25 from rhodecode.lib.utils2 import safe_str
27 26 from rhodecode.model.settings import SettingsModel
27 from rhodecode.translation import _
28 28
29 _ = TranslationStringFactory('rhodecode-enterprise')
30 29
31 30 log = logging.getLogger(__name__)
32 31
33 32
34 33 class AuthnResourceBase(object):
35 34 __name__ = None
36 35 __parent__ = None
37 36
38 37 def get_root(self):
39 38 current = self
40 39 while current.__parent__ is not None:
41 40 current = current.__parent__
42 41 return current
43 42
44 43
45 44 class AuthnPluginResourceBase(AuthnResourceBase):
46 45
47 46 def __init__(self, plugin):
48 47 self.plugin = plugin
49 48 self.__name__ = plugin.name
50 49 self.display_name = plugin.get_display_name()
51 50
52 51
53 52 class AuthnRootResource(AuthnResourceBase):
54 53 """
55 54 This is the root traversal resource object for the authentication settings.
56 55 """
57 56
58 57 def __init__(self):
59 58 self._store = {}
60 59 self._resource_name_map = {}
61 60 self.display_name = _('Global')
62 61
63 62 def __getitem__(self, key):
64 63 """
65 64 Customized get item function to return only items (plugins) that are
66 65 activated.
67 66 """
68 67 if self._is_item_active(key):
69 68 return self._store[key]
70 69 else:
71 70 raise KeyError('Authentication plugin "{}" is not active.'.format(
72 71 key))
73 72
74 73 def __iter__(self):
75 74 for key in self._store.keys():
76 75 if self._is_item_active(key):
77 76 yield self._store[key]
78 77
79 78 def _is_item_active(self, key):
80 79 activated_plugins = SettingsModel().get_auth_plugins()
81 80 plugin_id = self.get_plugin_id(key)
82 81 return plugin_id in activated_plugins
83 82
84 83 def get_plugin_id(self, resource_name):
85 84 """
86 85 Return the plugin id for the given traversal resource name.
87 86 """
88 87 # TODO: Store this info in the resource element.
89 88 return self._resource_name_map[resource_name]
90 89
91 90 def get_sorted_list(self):
92 91 """
93 92 Returns a sorted list of sub resources for displaying purposes.
94 93 """
95 94 def sort_key(resource):
96 95 return str.lower(safe_str(resource.display_name))
97 96
98 97 active = [item for item in self]
99 98 return sorted(active, key=sort_key)
100 99
101 100 def get_nav_list(self):
102 101 """
103 102 Returns a sorted list of resources for displaying the navigation.
104 103 """
105 104 list = self.get_sorted_list()
106 105 list.insert(0, self)
107 106 return list
108 107
109 108 def add_authn_resource(self, config, plugin_id, resource):
110 109 """
111 110 Register a traversal resource as a sub element to the authentication
112 111 settings. This method is registered as a directive on the pyramid
113 112 configurator object and called by plugins.
114 113 """
115 114
116 115 def _ensure_unique_name(name, limit=100):
117 116 counter = 1
118 117 current = name
119 118 while current in self._store.keys():
120 119 current = '{}{}'.format(name, counter)
121 120 counter += 1
122 121 if counter > limit:
123 122 raise ConfigurationError(
124 123 'Cannot build unique name for traversal resource "%s" '
125 124 'registered by plugin "%s"', name, plugin_id)
126 125 return current
127 126
128 127 # Allow plugin resources with identical names by renaming duplicates.
129 128 unique_name = _ensure_unique_name(resource.__name__)
130 129 if unique_name != resource.__name__:
131 log.warn('Name collision for traversal resource "%s" registered',
130 log.warn('Name collision for traversal resource "%s" registered '
132 131 'by authentication plugin "%s"', resource.__name__,
133 132 plugin_id)
134 133 resource.__name__ = unique_name
135 134
136 135 log.debug('Register traversal resource "%s" for plugin "%s"',
137 136 unique_name, plugin_id)
138 137 self._resource_name_map[unique_name] = plugin_id
139 138 resource.__parent__ = self
140 139 self._store[unique_name] = resource
141 140
142 141
143 142 root = AuthnRootResource()
144 143
145 144
146 145 def root_factory(request=None):
147 146 """
148 147 Returns the root traversal resource instance used for the authentication
149 148 settings route.
150 149 """
151 150 return root
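
add_authn_resource() above avoids traversal name collisions by appending a numeric suffix to duplicate resource names; a standalone sketch of that renaming logic:

    def ensure_unique_name(name, existing, limit=100):
        counter = 1
        current = name
        while current in existing:
            current = '{}{}'.format(name, counter)
            counter += 1
            if counter > limit:
                raise ValueError('cannot build unique name for "%s"' % name)
        return current

    print ensure_unique_name('ldap', {'ldap', 'crowd'})  # ldap1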
@@ -1,53 +1,51 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import colander
22 22
23 from pyramid.i18n import TranslationStringFactory
24
25 _ = TranslationStringFactory('rhodecode-enterprise')
23 from rhodecode.translation import _
26 24
27 25
28 26 class AuthnPluginSettingsSchemaBase(colander.MappingSchema):
29 27 """
30 28 This base schema is intended for use in authentication plugins.
31 29 It adds a few default settings (e.g., "enabled"), so that plugin
32 30 authors don't have to maintain a bunch of boilerplate.
33 31 """
34 32 enabled = colander.SchemaNode(
35 33 colander.Bool(),
36 34 default=False,
37 35 description=_('Enable or disable this authentication plugin.'),
38 36 missing=False,
39 37 title=_('Enabled'),
40 38 widget='bool',
41 39 )
42 40 cache_ttl = colander.SchemaNode(
43 41 colander.Int(),
44 42 default=0,
45 43 description=_('Amount of seconds to cache the authentication '
46 44 'call for this plugin. Useful for long calls like '
47 45 'LDAP to improve the responsiveness of the '
48 46 'authentication system (0 means disabled).'),
49 47 missing=0,
50 48 title=_('Auth Cache TTL'),
51 49 validator=colander.Range(min=0, max=None),
52 50 widget='int',
53 51 )
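
The base schema above gives every authentication plugin an "enabled" flag and a cache TTL; a concrete plugin extends it with its own nodes and lets colander turn raw form parameters into typed, validated values. A hedged sketch of that usage follows: the base class is re-declared here in trimmed-down form so the snippet runs without RhodeCode, and the host/port fields are hypothetical plugin settings, not fields of any real RhodeCode plugin.

# Sketch: extending a plugin settings base schema and validating form input.
import colander


class PluginSettingsBase(colander.MappingSchema):
    enabled = colander.SchemaNode(
        colander.Bool(), default=False, missing=False, title='Enabled')
    cache_ttl = colander.SchemaNode(
        colander.Int(), default=0, missing=0,
        validator=colander.Range(min=0), title='Auth Cache TTL')


class LdapLikeSettingsSchema(PluginSettingsBase):
    # hypothetical plugin-specific settings
    host = colander.SchemaNode(colander.String(), title='Host')
    port = colander.SchemaNode(
        colander.Int(), default=389, missing=389,
        validator=colander.Range(min=1, max=65535), title='Port')


schema = LdapLikeSettingsSchema()

# Raw form parameters arrive as strings and come back typed and validated.
print(schema.deserialize(
    {'enabled': 'true', 'cache_ttl': '30', 'host': 'ldap.example.com'}))
# roughly: {'enabled': True, 'cache_ttl': 30, 'host': 'ldap.example.com', 'port': 389}

try:
    schema.deserialize({'host': 'ldap.example.com', 'port': 'not-a-number'})
except colander.Invalid as exc:
    print(exc.asdict())   # e.g. {'port': '"not-a-number" is not a number'}
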
@@ -1,220 +1,182 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import colander
22 22 import formencode.htmlfill
23 23 import logging
24 24
25 25 from pyramid.httpexceptions import HTTPFound
26 from pyramid.i18n import TranslationStringFactory
27 26 from pyramid.renderers import render
28 27 from pyramid.response import Response
29 28
30 29 from rhodecode.authentication.base import get_auth_cache_manager
31 30 from rhodecode.authentication.interface import IAuthnPluginRegistry
32 31 from rhodecode.lib import auth
33 32 from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator
34 33 from rhodecode.model.forms import AuthSettingsForm
35 34 from rhodecode.model.meta import Session
36 35 from rhodecode.model.settings import SettingsModel
36 from rhodecode.translation import _
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 _ = TranslationStringFactory('rhodecode-enterprise')
41
42 40
43 41 class AuthnPluginViewBase(object):
44 42
45 43 def __init__(self, context, request):
46 44 self.request = request
47 45 self.context = context
48 46 self.plugin = context.plugin
49 47
50 # TODO: Think about replacing the htmlfill stuff.
51 def _render_and_fill(self, template, template_context, request,
52 form_defaults, validation_errors):
53 """
54 Helper to render a template and fill the HTML form fields with
55 defaults. Also displays the form errors.
56 """
57 # Render template to string.
58 html = render(template, template_context, request=request)
59
60 # Fill the HTML form fields with default values and add error messages.
61 html = formencode.htmlfill.render(
62 html,
63 defaults=form_defaults,
64 errors=validation_errors,
65 prefix_error=False,
66 encoding="UTF-8",
67 force_defaults=False)
68
69 return html
70
71 def settings_get(self):
48 def settings_get(self, defaults=None, errors=None):
72 49 """
73 50 View that displays the plugin settings as a form.
74 51 """
75 form_defaults = {}
76 validation_errors = None
52 defaults = defaults or {}
53 errors = errors or {}
77 54 schema = self.plugin.get_settings_schema()
78 55
79 56 # Get default values for the form.
80 for node in schema.children:
81 value = self.plugin.get_setting_by_name(node.name) or node.default
82 form_defaults[node.name] = value
57 for node in schema:
58 db_value = self.plugin.get_setting_by_name(node.name)
59 defaults.setdefault(node.name, db_value)
83 60
84 61 template_context = {
62 'defaults': defaults,
63 'errors': errors,
64 'plugin': self.context.plugin,
85 65 'resource': self.context,
86 'plugin': self.context.plugin
87 66 }
88 67
89 return Response(self._render_and_fill(
90 'rhodecode:templates/admin/auth/plugin_settings.html',
91 template_context,
92 self.request,
93 form_defaults,
94 validation_errors))
68 return template_context
95 69
96 70 def settings_post(self):
97 71 """
98 72 View that validates and stores the plugin settings.
99 73 """
100 74 schema = self.plugin.get_settings_schema()
101 75 try:
102 76 valid_data = schema.deserialize(self.request.params)
103 77 except colander.Invalid, e:
104 78 # Display error message and display form again.
105 form_defaults = self.request.params
106 validation_errors = e.asdict()
107 79 self.request.session.flash(
108 80 _('Errors exist when saving plugin settings. '
109 'Please check the form inputs.'),
81 'Please check the form inputs.'),
110 82 queue='error')
111
112 template_context = {
113 'resource': self.context,
114 'plugin': self.context.plugin
115 }
116
117 return Response(self._render_and_fill(
118 'rhodecode:templates/admin/auth/plugin_settings.html',
119 template_context,
120 self.request,
121 form_defaults,
122 validation_errors))
83 defaults = schema.flatten(self.request.params)
84 return self.settings_get(errors=e.asdict(), defaults=defaults)
123 85
124 86 # Store validated data.
125 87 for name, value in valid_data.items():
126 88 self.plugin.create_or_update_setting(name, value)
127 89 Session.commit()
128 90
129 91 # Display success message and redirect.
130 92 self.request.session.flash(
131 93 _('Auth settings updated successfully.'),
132 94 queue='success')
133 95 redirect_to = self.request.resource_path(
134 96 self.context, route_name='auth_home')
135 97 return HTTPFound(redirect_to)
136 98
137 99
138 100 # TODO: Ongoing migration in these views.
139 101 # - Maybe we should also use a colander schema for these views.
140 102 class AuthSettingsView(object):
141 103 def __init__(self, context, request):
142 104 self.context = context
143 105 self.request = request
144 106
145 107 # TODO: Move this into a utility function. It is needed in all view
146 108 # classes during migration. Maybe a mixin?
147 109
148 110 # Some of the decorators rely on this attribute to be present on the
149 111 # class of the decorated method.
150 112 self._rhodecode_user = request.user
151 113
152 114 @LoginRequired()
153 115 @HasPermissionAllDecorator('hg.admin')
154 def index(self, defaults={}, errors=None, prefix_error=False):
116 def index(self, defaults=None, errors=None, prefix_error=False):
117 defaults = defaults or {}
155 118 authn_registry = self.request.registry.getUtility(IAuthnPluginRegistry)
156 default_plugins = ['egg:rhodecode-enterprise-ce#rhodecode']
157 enabled_plugins = SettingsModel().get_auth_plugins() or default_plugins
119 enabled_plugins = SettingsModel().get_auth_plugins()
158 120
159 121 # Create template context and render it.
160 122 template_context = {
161 123 'resource': self.context,
162 124 'available_plugins': authn_registry.get_plugins(),
163 125 'enabled_plugins': enabled_plugins,
164 126 }
165 127 html = render('rhodecode:templates/admin/auth/auth_settings.html',
166 128 template_context,
167 129 request=self.request)
168 130
169 131 # Create form default values and fill the form.
170 132 form_defaults = {
171 133 'auth_plugins': ','.join(enabled_plugins)
172 134 }
173 135 form_defaults.update(defaults)
174 136 html = formencode.htmlfill.render(
175 137 html,
176 138 defaults=form_defaults,
177 139 errors=errors,
178 140 prefix_error=prefix_error,
179 141 encoding="UTF-8",
180 142 force_defaults=False)
181 143
182 144 return Response(html)
183 145
184 146 @LoginRequired()
185 147 @HasPermissionAllDecorator('hg.admin')
186 148 @auth.CSRFRequired()
187 149 def auth_settings(self):
188 150 try:
189 151 form = AuthSettingsForm()()
190 152 form_result = form.to_python(self.request.params)
191 153 plugins = ','.join(form_result['auth_plugins'])
192 154 setting = SettingsModel().create_or_update_setting(
193 155 'auth_plugins', plugins)
194 156 Session().add(setting)
195 157 Session().commit()
196 158
197 159 cache_manager = get_auth_cache_manager()
198 160 cache_manager.clear()
199 161 self.request.session.flash(
200 162 _('Auth settings updated successfully.'),
201 163 queue='success')
202 164 except formencode.Invalid as errors:
203 165 e = errors.error_dict or {}
204 166 self.request.session.flash(
205 167 _('Errors exist when saving plugin setting. '
206 168 'Please check the form inputs.'),
207 169 queue='error')
208 170 return self.index(
209 171 defaults=errors.value,
210 172 errors=e,
211 173 prefix_error=False)
212 174 except Exception:
213 175 log.exception('Exception in auth_settings')
214 176 self.request.session.flash(
215 177 _('Error occurred during update of auth settings.'),
216 178 queue='error')
217 179
218 180 redirect_to = self.request.resource_path(
219 181 self.context, route_name='auth_home')
220 182 return HTTPFound(redirect_to)
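
The rewritten views above drop the formencode.htmlfill round-trip for plugin settings: settings_post() deserializes the request parameters and, when colander raises Invalid, re-renders settings_get() with e.asdict() as the error dict and the submitted values as defaults. A framework-free sketch of that validate-or-redisplay flow is below; the store callable and render_form helper are hypothetical stand-ins for create_or_update_setting and the plugin_settings.html template, not the real Pyramid view machinery.

# Sketch: validate-or-redisplay, the control flow behind settings_post().
import colander


class SettingsSchema(colander.MappingSchema):
    enabled = colander.SchemaNode(colander.Bool(), default=False, missing=False)
    cache_ttl = colander.SchemaNode(
        colander.Int(), default=0, missing=0, validator=colander.Range(min=0))


def render_form(defaults, errors):
    # In the real view this would render plugin_settings.html with the
    # template context; here we just return what would be handed to it.
    return {'defaults': defaults, 'errors': errors}


def settings_post(params, store):
    schema = SettingsSchema()
    try:
        valid_data = schema.deserialize(params)
    except colander.Invalid as e:
        # Validation failed: show the form again with errors and the raw input.
        return render_form(defaults=dict(params), errors=e.asdict())
    for name, value in valid_data.items():
        store(name, value)
    return 'redirect: auth_home'


saved = {}
print(settings_post({'enabled': 'true', 'cache_ttl': '60'}, saved.__setitem__))
print(saved)                                                   # {'enabled': True, 'cache_ttl': 60}
print(settings_post({'cache_ttl': '-5'}, saved.__setitem__))   # errors re-rendered
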
@@ -1,181 +1,192 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Pylons environment configuration
23 23 """
24 24
25 25 import os
26 26 import logging
27 27 import rhodecode
28 28 import platform
29 29 import re
30 import io
30 31
31 32 from mako.lookup import TemplateLookup
32 33 from pylons.configuration import PylonsConfig
33 34 from pylons.error import handle_mako_error
35 from pyramid.settings import asbool
34 36
35 37 # don't remove this import, it does magic for celery
36 38 from rhodecode.lib import celerypylons # noqa
37 39
38 40 import rhodecode.lib.app_globals as app_globals
39 41
40 42 from rhodecode.config import utils
41 43 from rhodecode.config.routing import make_map
44 from rhodecode.config.jsroutes import generate_jsroutes_content
42 45
43 46 from rhodecode.lib import helpers
44 47 from rhodecode.lib.auth import set_available_permissions
45 48 from rhodecode.lib.utils import (
46 49 repo2db_mapper, make_db_config, set_rhodecode_config,
47 50 load_rcextensions)
48 51 from rhodecode.lib.utils2 import str2bool, aslist
49 52 from rhodecode.lib.vcs import connect_vcs, start_vcs_server
50 53 from rhodecode.model.scm import ScmModel
51 54
52 55 log = logging.getLogger(__name__)
53 56
54
55 57 def load_environment(global_conf, app_conf, initial=False,
56 58 test_env=None, test_index=None):
57 59 """
58 60 Configure the Pylons environment via the ``pylons.config``
59 61 object
60 62 """
61 63 config = PylonsConfig()
62 64
63 rhodecode.is_test = str2bool(app_conf.get('is_test', 'False'))
64 65
65 66 # Pylons paths
66 67 root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
67 68 paths = {
68 69 'root': root,
69 70 'controllers': os.path.join(root, 'controllers'),
70 71 'static_files': os.path.join(root, 'public'),
71 72 'templates': [os.path.join(root, 'templates')],
72 73 }
73 74
74 75 # Initialize config with the basic options
75 76 config.init_app(global_conf, app_conf, package='rhodecode', paths=paths)
76 77
77 78 # store some globals into rhodecode
78 79 rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery'))
79 80 rhodecode.CELERY_EAGER = str2bool(
80 81 config['app_conf'].get('celery.always.eager'))
81 82
82 83 config['routes.map'] = make_map(config)
84
85 if asbool(config['debug']):
86 jsroutes = config['routes.map'].jsroutes()
87 jsroutes_file_content = generate_jsroutes_content(jsroutes)
88 jsroutes_file_path = os.path.join(
89 paths['static_files'], 'js', 'rhodecode', 'routes.js')
90
91 with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
92 f.write(jsroutes_file_content)
93
83 94 config['pylons.app_globals'] = app_globals.Globals(config)
84 95 config['pylons.h'] = helpers
85 96 rhodecode.CONFIG = config
86 97
87 98 load_rcextensions(root_path=config['here'])
88 99
89 100 # Setup cache object as early as possible
90 101 import pylons
91 102 pylons.cache._push_object(config['pylons.app_globals'].cache)
92 103
93 104 # Create the Mako TemplateLookup, with the default auto-escaping
94 105 config['pylons.app_globals'].mako_lookup = TemplateLookup(
95 106 directories=paths['templates'],
96 107 error_handler=handle_mako_error,
97 108 module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
98 109 input_encoding='utf-8', default_filters=['escape'],
99 110 imports=['from webhelpers.html import escape'])
100 111
101 112 # make the template context 'c' strict: accessing a non-existing attribute raises an error
102 113 config['pylons.strict_tmpl_context'] = True
103 config_file_name = os.path.split(config['__file__'])[-1]
104 test = re.match('^test[\w_]*\.ini$', config_file_name) is not None
105 if test:
106 if test_env is None:
107 test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
108
109 from rhodecode.lib.utils import create_test_env, create_test_index
110 from rhodecode.tests import TESTS_TMP_PATH
111 # test repos
112 if test_env:
113 create_test_env(TESTS_TMP_PATH, config)
114 create_test_index(TESTS_TMP_PATH, config, True)
115 114
116 115 # Limit backends to "vcs.backends" from configuration
117 116 backends = config['vcs.backends'] = aslist(
118 117 config.get('vcs.backends', 'hg,git'), sep=',')
119 118 for alias in rhodecode.BACKENDS.keys():
120 119 if alias not in backends:
121 120 del rhodecode.BACKENDS[alias]
122 121 log.info("Enabled backends: %s", backends)
123 122
124 123 # initialize vcs client and optionally run the server if enabled
125 124 vcs_server_uri = config.get('vcs.server', '')
126 125 vcs_server_enabled = str2bool(config.get('vcs.server.enable', 'true'))
127 126 start_server = (
128 127 str2bool(config.get('vcs.start_server', 'false')) and
129 128 not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0')))
130 129 if vcs_server_enabled and start_server:
131 130 log.info("Starting vcsserver")
132 131 start_vcs_server(server_and_port=vcs_server_uri,
133 132 protocol=utils.get_vcs_server_protocol(config),
134 133 log_level=config['vcs.server.log_level'])
135 134
136 # MULTIPLE DB configs
137 # Setup the SQLAlchemy database engine
138 utils.initialize_database(config)
139
140 135 set_available_permissions(config)
141 136 db_cfg = make_db_config(clear_session=True)
142 137
143 138 repos_path = list(db_cfg.items('paths'))[0][1]
144 139 config['base_path'] = repos_path
145 140
146 141 config['vcs.hooks.direct_calls'] = _use_direct_hook_calls(config)
147 142 config['vcs.hooks.protocol'] = _get_vcs_hooks_protocol(config)
148 143
149 144 # store db config also in main global CONFIG
150 145 set_rhodecode_config(config)
151 146
152 147 # configure instance id
153 148 utils.set_instance_id(config)
154 149
155 150 # CONFIGURATION OPTIONS HERE (note: all config options will override
156 151 # any Pylons config options)
157 152
158 153 # store config reference into our module to skip import magic of pylons
159 154 rhodecode.CONFIG.update(config)
160 155
161 156 utils.configure_pyro4(config)
162 157 utils.configure_vcs(config)
163 158 if vcs_server_enabled:
164 159 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(config))
165 160
166 161 import_on_startup = str2bool(config.get('startup.import_repos', False))
167 162 if vcs_server_enabled and import_on_startup:
168 163 repo2db_mapper(ScmModel().repo_scan(repos_path), remove_obsolete=False)
169 164 return config
170 165
171 166
172 167 def _use_direct_hook_calls(config):
173 168 default_direct_hook_calls = 'false'
174 169 direct_hook_calls = str2bool(
175 170 config.get('vcs.hooks.direct_calls', default_direct_hook_calls))
176 171 return direct_hook_calls
177 172
178 173
179 174 def _get_vcs_hooks_protocol(config):
180 175 protocol = config.get('vcs.hooks.protocol', 'pyro4').lower()
181 176 return protocol
177
178
179 def load_pyramid_environment(global_config, settings):
180 # Some parts of the code expect a merge of global and app settings.
181 settings_merged = global_config.copy()
182 settings_merged.update(settings)
183
184 # If this is a test run we prepare the test environment like
185 # creating a test database, test search index and test repositories.
186 # This has to be done before the database connection is initialized.
187 if settings['is_test']:
188 rhodecode.is_test = True
189 utils.initialize_test_environment(settings_merged)
190
191 # Initialize the database connection.
192 utils.initialize_database(settings_merged)
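
Two changes stand out in the hunk above: in debug mode the Pylons setup now dumps every route marked with jsroute=True into public/js/rhodecode/routes.js, and test-environment creation plus database initialization moved into load_pyramid_environment() so they run inside the Pyramid context. The sketch below only illustrates the first idea, serializing (name, path, args) tuples into a small static JS file; the registerRoutes() wrapper is a made-up placeholder, since the real content comes from rhodecode.config.jsroutes.generate_jsroutes_content(), which is not shown in this diff.

# Sketch: dumping named routes into a pyroutes-style static JS file.
import io
import json
import os
import tempfile


def generate_jsroutes_content(jsroutes):
    # jsroutes is an iterable of (name, path, args) tuples, the same shape
    # that JSRoutesMapper.jsroutes() yields in routing.py
    lines = ['// generated automatically, do not edit',
             'registerRoutes([']
    for name, path, args in jsroutes:
        lines.append('  {},'.format(json.dumps([name, path, args])))
    lines.append(']);')
    return u'\n'.join(lines) + u'\n'


routes = [
    ('home', '/', []),
    ('user_autocomplete_data', '/_users', []),
    ('user_profile', '/_profiles/%(username)s', ['username']),
]

target = os.path.join(tempfile.mkdtemp(), 'routes.js')
with io.open(target, 'w', encoding='utf-8') as f:
    f.write(generate_jsroutes_content(routes))
print(target)
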
@@ -1,309 +1,315 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Pylons middleware initialization
23 23 """
24 24 import logging
25 25
26 26 from paste.registry import RegistryManager
27 27 from paste.gzipper import make_gzip_middleware
28 28 from pylons.middleware import ErrorHandler, StatusCodeRedirect
29 29 from pylons.wsgiapp import PylonsApp
30 30 from pyramid.authorization import ACLAuthorizationPolicy
31 31 from pyramid.config import Configurator
32 32 from pyramid.static import static_view
33 33 from pyramid.settings import asbool, aslist
34 34 from pyramid.wsgi import wsgiapp
35 35 from routes.middleware import RoutesMiddleware
36 36 import routes.util
37 37
38 38 import rhodecode
39 39 from rhodecode.config import patches
40 from rhodecode.config.environment import load_environment
40 from rhodecode.config.environment import (
41 load_environment, load_pyramid_environment)
41 42 from rhodecode.lib.middleware import csrf
42 43 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
43 44 from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper
44 45 from rhodecode.lib.middleware.https_fixup import HttpsFixup
45 46 from rhodecode.lib.middleware.vcs import VCSMiddleware
46 47 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
47 48
48 49
49 50 log = logging.getLogger(__name__)
50 51
51 52
52 53 def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
53 54 """Create a Pylons WSGI application and return it
54 55
55 56 ``global_conf``
56 57 The inherited configuration for this application. Normally from
57 58 the [DEFAULT] section of the Paste ini file.
58 59
59 60 ``full_stack``
60 61 Whether or not this application provides a full WSGI stack (by
61 62 default, meaning it handles its own exceptions and errors).
62 63 Disable full_stack when this application is "managed" by
63 64 another WSGI middleware.
64 65
65 66 ``app_conf``
66 67 The application's local configuration. Normally specified in
67 68 the [app:<name>] section of the Paste ini file (where <name>
68 69 defaults to main).
69 70
70 71 """
71 72 # Apply compatibility patches
72 73 patches.kombu_1_5_1_python_2_7_11()
73 74 patches.inspect_getargspec()
74 75
75 76 # Configure the Pylons environment
76 77 config = load_environment(global_conf, app_conf)
77 78
78 79 # The Pylons WSGI app
79 80 app = PylonsApp(config=config)
80 81 if rhodecode.is_test:
81 82 app = csrf.CSRFDetector(app)
82 83
83 84 expected_origin = config.get('expected_origin')
84 85 if expected_origin:
85 86 # The API can be accessed from other Origins.
86 87 app = csrf.OriginChecker(app, expected_origin,
87 88 skip_urls=[routes.util.url_for('api')])
88 89
89 90 # Add RoutesMiddleware. Currently we have two instances in the stack. This
90 91 # is the lower one to make the StatusCodeRedirect middleware happy.
91 92 # TODO: johbo: This is not optimal, search for a better solution.
92 93 app = RoutesMiddleware(app, config['routes.map'])
93 94
94 95 # CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares)
95 96 if asbool(config['pdebug']):
96 97 from rhodecode.lib.profiler import ProfilingMiddleware
97 98 app = ProfilingMiddleware(app)
98 99
99 100 # Protect from VCS Server error related pages when server is not available
100 101 vcs_server_enabled = asbool(config.get('vcs.server.enable', 'true'))
101 102 if not vcs_server_enabled:
102 103 app = DisableVCSPagesWrapper(app)
103 104
104 105 if asbool(full_stack):
105 106
106 107 # Appenlight monitoring and error handler
107 108 app, appenlight_client = wrap_in_appenlight_if_enabled(app, config)
108 109
109 110 # Handle Python exceptions
110 111 app = ErrorHandler(app, global_conf, **config['pylons.errorware'])
111 112
112 113 # we want our low-level middleware to get to the request ASAP. We don't
113 114 # need any pylons stack middleware for them
114 115 app = VCSMiddleware(app, config, appenlight_client)
115 116 # Display error documents for 401, 403, 404 status codes (and
116 117 # 500 when debug is disabled)
117 118 if asbool(config['debug']):
118 119 app = StatusCodeRedirect(app)
119 120 else:
120 121 app = StatusCodeRedirect(app, [400, 401, 403, 404, 500])
121 122
122 123 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
123 124 app = HttpsFixup(app, config)
124 125
125 126 # Establish the Registry for this application
126 127 app = RegistryManager(app)
127 128
128 129 app.config = config
129 130
130 131 return app
131 132
132 133
133 134 def make_pyramid_app(global_config, **settings):
134 135 """
135 136 Constructs the WSGI application based on Pyramid and wraps the Pylons based
136 137 application.
137 138
138 139 Specials:
139 140
140 141 * We migrate from Pylons to Pyramid. While doing this, we keep both
141 142 frameworks functional. This involves moving some WSGI middlewares around
142 143 and providing access to some data internals, so that the old code is
143 144 still functional.
144 145
145 146 * The application can also be integrated like a plugin via the call to
146 147 `includeme`. This is accompanied with the other utility functions which
147 148 are called. Changing this should be done with great care to not break
148 149 cases when these fragments are assembled from another place.
149 150
150 151 """
151 152 # The edition string should be available in pylons too, so we add it here
152 153 # before copying the settings.
153 154 settings.setdefault('rhodecode.edition', 'Community Edition')
154 155
155 156 # As long as our Pylons application expects "unprepared" settings, make
156 157 # sure that we keep an unmodified copy. This avoids unintentional change of
157 158 # behavior in the old application.
158 159 settings_pylons = settings.copy()
159 160
160 161 sanitize_settings_and_apply_defaults(settings)
161 162 config = Configurator(settings=settings)
162 163 add_pylons_compat_data(config.registry, global_config, settings_pylons)
164
165 load_pyramid_environment(global_config, settings)
166
163 167 includeme(config)
164 168 includeme_last(config)
165 169 pyramid_app = config.make_wsgi_app()
166 170 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
167 171 return pyramid_app
168 172
169 173
170 174 def add_pylons_compat_data(registry, global_config, settings):
171 175 """
172 176 Attach data to the registry to support the Pylons integration.
173 177 """
174 178 registry._pylons_compat_global_config = global_config
175 179 registry._pylons_compat_settings = settings
176 180
177 181
178 182 def includeme(config):
179 183 settings = config.registry.settings
180 184
181 185 # Includes which are required. The application would fail without them.
182 186 config.include('pyramid_mako')
183 187 config.include('pyramid_beaker')
184 188 config.include('rhodecode.authentication')
189 config.include('rhodecode.login')
185 190 config.include('rhodecode.tweens')
186 191 config.include('rhodecode.api')
187 192
188 193 # Set the authorization policy.
189 194 authz_policy = ACLAuthorizationPolicy()
190 195 config.set_authorization_policy(authz_policy)
191 196
192 197 # Set the default renderer for HTML templates to mako.
193 198 config.add_mako_renderer('.html')
194 199
195 200 # plugin information
196 201 config.registry.rhodecode_plugins = {}
197 202
198 203 config.add_directive(
199 204 'register_rhodecode_plugin', register_rhodecode_plugin)
200 205 # include RhodeCode plugins
201 206 includes = aslist(settings.get('rhodecode.includes', []))
202 207 for inc in includes:
203 208 config.include(inc)
204 209
205 210 # This is the glue which allows us to migrate in chunks. By registering the
206 211 # pylons based application as the "Not Found" view in Pyramid, we will
207 212 # fall back to the old application each time the new one does not yet know
208 213 # how to handle a request.
209 214 pylons_app = make_app(
210 215 config.registry._pylons_compat_global_config,
211 216 **config.registry._pylons_compat_settings)
212 217 config.registry._pylons_compat_config = pylons_app.config
213 218 pylons_app_as_view = wsgiapp(pylons_app)
214 219 config.add_notfound_view(pylons_app_as_view)
215 220
216 221
217 222 def includeme_last(config):
218 223 """
219 224 The static file catchall needs to be last in the view configuration.
220 225 """
221 226 settings = config.registry.settings
222 227
223 228 # Note: johbo: I would prefer to register a prefix for static files at some
224 229 # point, e.g. move them under '_static/'. This would fully avoid name
225 230 # clashes with repository names. Imagine someone calling his
226 231 # repo "css" ;-) Also having an external web server to serve out the static
227 232 # files seems to be easier to set up if they have a common prefix.
228 233 #
229 234 # Example: config.add_static_view('_static', path='rhodecode:public')
230 235 #
231 236 # It might be an option to register both paths for a while and then migrate
232 237 # over to the new location.
233 238
234 239 # Serving static files with a catchall.
235 240 if settings['static_files']:
236 241 config.add_route('catchall_static', '/*subpath')
237 242 config.add_view(
238 243 static_view('rhodecode:public'), route_name='catchall_static')
239 244
240 245
241 246 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
242 247 """
243 248 Apply outer WSGI middlewares around the application.
244 249
245 250 Part of this has been moved up from the Pylons layer, so that the
246 251 data is also available if old Pylons code is hit through an already ported
247 252 view.
248 253 """
249 254 settings = config.registry.settings
250 255
251 256 # Add RoutesMiddleware. Currently we have two instances in the stack. This
252 257 # is the upper one to support the pylons compatibility tween during
253 258 # migration to pyramid.
254 259 pyramid_app = RoutesMiddleware(
255 260 pyramid_app, config.registry._pylons_compat_config['routes.map'])
256 261
257 262 # TODO: johbo: Don't really see why we enable the gzip middleware when
258 263 # serving static files, might be something that should have its own setting
259 264 # as well?
260 265 if settings['static_files']:
261 266 pyramid_app = make_gzip_middleware(
262 267 pyramid_app, settings, compress_level=1)
263 268
264 269 return pyramid_app
265 270
266 271
267 272 def sanitize_settings_and_apply_defaults(settings):
268 273 """
269 274 Applies settings defaults and does all type conversion.
270 275
271 276 We would move all settings parsing and preparation into this place, so that
272 277 we have only one place left which deals with this part. The remaining parts
273 278 of the application would start to rely fully on well prepared settings.
274 279
275 280 This piece would later be split up per topic to avoid a big fat monster
276 281 function.
277 282 """
278 283
279 284 # Pyramid's mako renderer has to search in the templates folder so that the
280 285 # old templates still work. Ported and new templates are expected to use
281 286 # real asset specifications for the includes.
282 287 mako_directories = settings.setdefault('mako.directories', [
283 288 # Base templates of the original Pylons application
284 289 'rhodecode:templates',
285 290 ])
286 291 log.debug(
287 292 "Using the following Mako template directories: %s",
288 293 mako_directories)
289 294
290 295 # Default includes, possible to change as a user
291 296 pyramid_includes = settings.setdefault('pyramid.includes', [
292 297 'rhodecode.lib.middleware.request_wrapper',
293 298 ])
294 299 log.debug(
295 300 "Using the following pyramid.includes: %s",
296 301 pyramid_includes)
297 302
298 303 # TODO: johbo: Re-think this, usually the call to config.include
299 304 # should allow to pass in a prefix.
300 305 settings.setdefault('rhodecode.api.url', '/_admin/api')
301 306
302 307 _bool_setting(settings, 'vcs.server.enable', 'true')
303 308 _bool_setting(settings, 'static_files', 'true')
309 _bool_setting(settings, 'is_test', 'false')
304 310
305 311 return settings
306 312
307 313
308 314 def _bool_setting(settings, name, default):
309 315 settings[name] = asbool(settings.get(name, default))
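
The middleware hunk above contains the core of the migration strategy: the full Pylons application is wrapped with pyramid.wsgi.wsgiapp() and registered as Pyramid's Not Found view, so any URL the Pyramid side cannot resolve yet falls through to the old stack. The following is a minimal sketch of that pattern with a toy legacy WSGI app standing in for Pylons; the 'hello' route and view are made-up examples of an already-ported endpoint.

# Sketch: register a legacy WSGI app as Pyramid's Not Found view.
from wsgiref.simple_server import make_server

from pyramid.config import Configurator
from pyramid.response import Response
from pyramid.wsgi import wsgiapp


def legacy_app(environ, start_response):
    # Pretend this is the full Pylons stack handling everything it used to.
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'handled by the legacy application']


def hello_view(request):
    return Response('handled by the new Pyramid view')


config = Configurator()
config.add_route('hello', '/hello')
config.add_view(hello_view, route_name='hello')
# Anything Pyramid cannot resolve falls through to the wrapped legacy app.
config.add_notfound_view(wsgiapp(legacy_app))
app = config.make_wsgi_app()

if __name__ == '__main__':
    # /hello is served by Pyramid, any other path by legacy_app
    make_server('127.0.0.1', 8080, app).serve_forever()
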
@@ -1,1112 +1,1149 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Routes configuration
23 23
24 24 The more specific and detailed routes should be defined first so they
25 25 may take precedence over the more generic routes. For more information
26 26 refer to the routes manual at http://routes.groovie.org/docs/
27 27
28 28 IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py
29 29 and the _route_name variable, which uses some of the names stored here to do redirects.
30 30 """
31 31 import os
32 import re
32 33 from routes import Mapper
33 34
34 35 from rhodecode.config import routing_links
35 36
36 37 # prefix for non repository related links needs to be prefixed with `/`
37 38 ADMIN_PREFIX = '/_admin'
38 39
39 40 # Default requirements for URL parts
40 41 URL_NAME_REQUIREMENTS = {
41 42 # group name can have a slash in them, but they must not end with a slash
42 43 'group_name': r'.*?[^/]',
43 44 # repo names can have a slash in them, but they must not end with a slash
44 45 'repo_name': r'.*?[^/]',
45 46 # file path eats up everything at the end
46 47 'f_path': r'.*',
47 48 # reference types
48 49 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
49 50 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
50 51 }
51 52
52 53
54 class JSRoutesMapper(Mapper):
55 """
56 Wrapper for routes.Mapper that makes pyroutes-compatible URL definitions
57 """
58 _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
59 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
60 def __init__(self, *args, **kw):
61 super(JSRoutesMapper, self).__init__(*args, **kw)
62 self._jsroutes = []
63
64 def connect(self, *args, **kw):
65 """
66 Wrapper for connect to take an extra argument jsroute=True
67
68 :param jsroute: boolean, if True will add the route to the pyroutes list
69 """
70 if kw.pop('jsroute', False):
71 if not self._named_route_regex.match(args[0]):
72 raise Exception('only named routes can be added to pyroutes')
73 self._jsroutes.append(args[0])
74
75 super(JSRoutesMapper, self).connect(*args, **kw)
76
77 def _extract_route_information(self, route):
78 """
79 Convert a route into tuple(name, path, args), eg:
80 ('user_profile', '/profile/%(username)s', ['username'])
81 """
82 routepath = route.routepath
83 def replace(matchobj):
84 if matchobj.group(1):
85 return "%%(%s)s" % matchobj.group(1).split(':')[0]
86 else:
87 return "%%(%s)s" % matchobj.group(2)
88
89 routepath = self._argument_prog.sub(replace, routepath)
90 return (
91 route.name,
92 routepath,
93 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
94 for arg in self._argument_prog.findall(route.routepath)]
95 )
96
97 def jsroutes(self):
98 """
99 Return a list of pyroutes.js compatible routes
100 """
101 for route_name in self._jsroutes:
102 yield self._extract_route_information(self._routenames[route_name])
103
104
53 105 def make_map(config):
54 106 """Create, configure and return the routes Mapper"""
55 rmap = Mapper(directory=config['pylons.paths']['controllers'],
107 rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'],
56 108 always_scan=config['debug'])
57 109 rmap.minimization = False
58 110 rmap.explicit = False
59 111
60 112 from rhodecode.lib.utils2 import str2bool
61 113 from rhodecode.model import repo, repo_group
62 114
63 115 def check_repo(environ, match_dict):
64 116 """
65 117 check for valid repository for proper 404 handling
66 118
67 119 :param environ:
68 120 :param match_dict:
69 121 """
70 122 repo_name = match_dict.get('repo_name')
71 123
72 124 if match_dict.get('f_path'):
73 125 # fix for multiple initial slashes that causes errors
74 126 match_dict['f_path'] = match_dict['f_path'].lstrip('/')
75 127 repo_model = repo.RepoModel()
76 128 by_name_match = repo_model.get_by_repo_name(repo_name)
77 129 # if we match quickly from database, short circuit the operation,
78 130 # and validate repo based on the type.
79 131 if by_name_match:
80 132 return True
81 133
82 134 by_id_match = repo_model.get_repo_by_id(repo_name)
83 135 if by_id_match:
84 136 repo_name = by_id_match.repo_name
85 137 match_dict['repo_name'] = repo_name
86 138 return True
87 139
88 140 return False
89 141
90 142 def check_group(environ, match_dict):
91 143 """
92 144 check for valid repository group path for proper 404 handling
93 145
94 146 :param environ:
95 147 :param match_dict:
96 148 """
97 149 repo_group_name = match_dict.get('group_name')
98 150 repo_group_model = repo_group.RepoGroupModel()
99 151 by_name_match = repo_group_model.get_by_group_name(repo_group_name)
100 152 if by_name_match:
101 153 return True
102 154
103 155 return False
104 156
105 157 def check_user_group(environ, match_dict):
106 158 """
107 159 check for valid user group for proper 404 handling
108 160
109 161 :param environ:
110 162 :param match_dict:
111 163 """
112 164 return True
113 165
114 166 def check_int(environ, match_dict):
115 167 return match_dict.get('id').isdigit()
116 168
117 169 # The ErrorController route (handles 404/500 error pages); it should
118 170 # likely stay at the top, ensuring it can always be resolved
119 171 rmap.connect('/error/{action}', controller='error')
120 172 rmap.connect('/error/{action}/{id}', controller='error')
121 173
122 174 #==========================================================================
123 175 # CUSTOM ROUTES HERE
124 176 #==========================================================================
125 177
126 178 # MAIN PAGE
127 rmap.connect('home', '/', controller='home', action='index')
128 rmap.connect('repo_switcher_data', '/_repos_and_groups', controller='home',
129 action='repo_switcher_data')
179 rmap.connect('home', '/', controller='home', action='index', jsroute=True)
180 rmap.connect('goto_switcher_data', '/_goto_data', controller='home',
181 action='goto_switcher_data')
130 182 rmap.connect('repo_list_data', '/_repos', controller='home',
131 183 action='repo_list_data')
132 184
133 185 rmap.connect('user_autocomplete_data', '/_users', controller='home',
134 action='user_autocomplete_data')
186 action='user_autocomplete_data', jsroute=True)
135 187 rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home',
136 188 action='user_group_autocomplete_data')
137 189
138 190 rmap.connect(
139 191 'user_profile', '/_profiles/{username}', controller='users',
140 192 action='user_profile')
141 193
142 194 # TODO: johbo: Static links, to be replaced by our redirection mechanism
143 195 rmap.connect('rst_help',
144 196 'http://docutils.sourceforge.net/docs/user/rst/quickref.html',
145 197 _static=True)
146 198 rmap.connect('markdown_help',
147 199 'http://daringfireball.net/projects/markdown/syntax',
148 200 _static=True)
149 201 rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True)
150 202 rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True)
151 203 rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True)
152 204 # TODO: anderson - making this a static link since redirect won't play
153 205 # nice with POST requests
154 206 rmap.connect('enterprise_license_convert_from_old',
155 207 'https://rhodecode.com/u/license-upgrade',
156 208 _static=True)
157 209
158 210 routing_links.connect_redirection_links(rmap)
159 211
160 212 rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
161 213 rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')
162 214
163 215 # ADMIN REPOSITORY ROUTES
164 216 with rmap.submapper(path_prefix=ADMIN_PREFIX,
165 217 controller='admin/repos') as m:
166 218 m.connect('repos', '/repos',
167 219 action='create', conditions={'method': ['POST']})
168 220 m.connect('repos', '/repos',
169 221 action='index', conditions={'method': ['GET']})
170 m.connect('new_repo', '/create_repository',
222 m.connect('new_repo', '/create_repository', jsroute=True,
171 223 action='create_repository', conditions={'method': ['GET']})
172 224 m.connect('/repos/{repo_name}',
173 225 action='update', conditions={'method': ['PUT'],
174 226 'function': check_repo},
175 227 requirements=URL_NAME_REQUIREMENTS)
176 228 m.connect('delete_repo', '/repos/{repo_name}',
177 229 action='delete', conditions={'method': ['DELETE']},
178 230 requirements=URL_NAME_REQUIREMENTS)
179 231 m.connect('repo', '/repos/{repo_name}',
180 232 action='show', conditions={'method': ['GET'],
181 233 'function': check_repo},
182 234 requirements=URL_NAME_REQUIREMENTS)
183 235
184 236 # ADMIN REPOSITORY GROUPS ROUTES
185 237 with rmap.submapper(path_prefix=ADMIN_PREFIX,
186 238 controller='admin/repo_groups') as m:
187 239 m.connect('repo_groups', '/repo_groups',
188 240 action='create', conditions={'method': ['POST']})
189 241 m.connect('repo_groups', '/repo_groups',
190 242 action='index', conditions={'method': ['GET']})
191 243 m.connect('new_repo_group', '/repo_groups/new',
192 244 action='new', conditions={'method': ['GET']})
193 245 m.connect('update_repo_group', '/repo_groups/{group_name}',
194 246 action='update', conditions={'method': ['PUT'],
195 247 'function': check_group},
196 248 requirements=URL_NAME_REQUIREMENTS)
197 249
198 250 # EXTRAS REPO GROUP ROUTES
199 251 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
200 252 action='edit',
201 253 conditions={'method': ['GET'], 'function': check_group},
202 254 requirements=URL_NAME_REQUIREMENTS)
203 255 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
204 256 action='edit',
205 257 conditions={'method': ['PUT'], 'function': check_group},
206 258 requirements=URL_NAME_REQUIREMENTS)
207 259
208 260 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
209 261 action='edit_repo_group_advanced',
210 262 conditions={'method': ['GET'], 'function': check_group},
211 263 requirements=URL_NAME_REQUIREMENTS)
212 264 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
213 265 action='edit_repo_group_advanced',
214 266 conditions={'method': ['PUT'], 'function': check_group},
215 267 requirements=URL_NAME_REQUIREMENTS)
216 268
217 269 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
218 270 action='edit_repo_group_perms',
219 271 conditions={'method': ['GET'], 'function': check_group},
220 272 requirements=URL_NAME_REQUIREMENTS)
221 273 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
222 274 action='update_perms',
223 275 conditions={'method': ['PUT'], 'function': check_group},
224 276 requirements=URL_NAME_REQUIREMENTS)
225 277
226 278 m.connect('delete_repo_group', '/repo_groups/{group_name}',
227 279 action='delete', conditions={'method': ['DELETE'],
228 280 'function': check_group},
229 281 requirements=URL_NAME_REQUIREMENTS)
230 282
231 283 # ADMIN USER ROUTES
232 284 with rmap.submapper(path_prefix=ADMIN_PREFIX,
233 285 controller='admin/users') as m:
234 286 m.connect('users', '/users',
235 287 action='create', conditions={'method': ['POST']})
236 288 m.connect('users', '/users',
237 289 action='index', conditions={'method': ['GET']})
238 290 m.connect('new_user', '/users/new',
239 291 action='new', conditions={'method': ['GET']})
240 292 m.connect('update_user', '/users/{user_id}',
241 293 action='update', conditions={'method': ['PUT']})
242 294 m.connect('delete_user', '/users/{user_id}',
243 295 action='delete', conditions={'method': ['DELETE']})
244 296 m.connect('edit_user', '/users/{user_id}/edit',
245 297 action='edit', conditions={'method': ['GET']})
246 298 m.connect('user', '/users/{user_id}',
247 299 action='show', conditions={'method': ['GET']})
248 300 m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
249 301 action='reset_password', conditions={'method': ['POST']})
250 302 m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
251 303 action='create_personal_repo_group', conditions={'method': ['POST']})
252 304
253 305 # EXTRAS USER ROUTES
254 306 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
255 307 action='edit_advanced', conditions={'method': ['GET']})
256 308 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
257 309 action='update_advanced', conditions={'method': ['PUT']})
258 310
259 311 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
260 312 action='edit_auth_tokens', conditions={'method': ['GET']})
261 313 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
262 314 action='add_auth_token', conditions={'method': ['PUT']})
263 315 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
264 316 action='delete_auth_token', conditions={'method': ['DELETE']})
265 317
266 318 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
267 319 action='edit_global_perms', conditions={'method': ['GET']})
268 320 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
269 321 action='update_global_perms', conditions={'method': ['PUT']})
270 322
271 323 m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
272 324 action='edit_perms_summary', conditions={'method': ['GET']})
273 325
274 326 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
275 327 action='edit_emails', conditions={'method': ['GET']})
276 328 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
277 329 action='add_email', conditions={'method': ['PUT']})
278 330 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
279 331 action='delete_email', conditions={'method': ['DELETE']})
280 332
281 333 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
282 334 action='edit_ips', conditions={'method': ['GET']})
283 335 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
284 336 action='add_ip', conditions={'method': ['PUT']})
285 337 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
286 338 action='delete_ip', conditions={'method': ['DELETE']})
287 339
288 340 # ADMIN USER GROUPS REST ROUTES
289 341 with rmap.submapper(path_prefix=ADMIN_PREFIX,
290 342 controller='admin/user_groups') as m:
291 343 m.connect('users_groups', '/user_groups',
292 344 action='create', conditions={'method': ['POST']})
293 345 m.connect('users_groups', '/user_groups',
294 346 action='index', conditions={'method': ['GET']})
295 347 m.connect('new_users_group', '/user_groups/new',
296 348 action='new', conditions={'method': ['GET']})
297 349 m.connect('update_users_group', '/user_groups/{user_group_id}',
298 350 action='update', conditions={'method': ['PUT']})
299 351 m.connect('delete_users_group', '/user_groups/{user_group_id}',
300 352 action='delete', conditions={'method': ['DELETE']})
301 353 m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
302 354 action='edit', conditions={'method': ['GET']},
303 355 function=check_user_group)
304 356
305 357 # EXTRAS USER GROUP ROUTES
306 m.connect('edit_user_group_global_perms', '/user_groups/{user_group_id}/edit/global_permissions',
358 m.connect('edit_user_group_global_perms',
359 '/user_groups/{user_group_id}/edit/global_permissions',
307 360 action='edit_global_perms', conditions={'method': ['GET']})
308 m.connect('edit_user_group_global_perms', '/user_groups/{user_group_id}/edit/global_permissions',
361 m.connect('edit_user_group_global_perms',
362 '/user_groups/{user_group_id}/edit/global_permissions',
309 363 action='update_global_perms', conditions={'method': ['PUT']})
310 m.connect('edit_user_group_perms_summary', '/user_groups/{user_group_id}/edit/permissions_summary',
364 m.connect('edit_user_group_perms_summary',
365 '/user_groups/{user_group_id}/edit/permissions_summary',
311 366 action='edit_perms_summary', conditions={'method': ['GET']})
312 367
313 m.connect('edit_user_group_perms', '/user_groups/{user_group_id}/edit/permissions',
368 m.connect('edit_user_group_perms',
369 '/user_groups/{user_group_id}/edit/permissions',
314 370 action='edit_perms', conditions={'method': ['GET']})
315 m.connect('edit_user_group_perms', '/user_groups/{user_group_id}/edit/permissions',
371 m.connect('edit_user_group_perms',
372 '/user_groups/{user_group_id}/edit/permissions',
316 373 action='update_perms', conditions={'method': ['PUT']})
317 374
318 m.connect('edit_user_group_advanced', '/user_groups/{user_group_id}/edit/advanced',
375 m.connect('edit_user_group_advanced',
376 '/user_groups/{user_group_id}/edit/advanced',
319 377 action='edit_advanced', conditions={'method': ['GET']})
320 378
321 m.connect('edit_user_group_members', '/user_groups/{user_group_id}/edit/members',
379 m.connect('edit_user_group_members',
380 '/user_groups/{user_group_id}/edit/members', jsroute=True,
322 381 action='edit_members', conditions={'method': ['GET']})
323 382
324 383 # ADMIN PERMISSIONS ROUTES
325 384 with rmap.submapper(path_prefix=ADMIN_PREFIX,
326 385 controller='admin/permissions') as m:
327 386 m.connect('admin_permissions_application', '/permissions/application',
328 387 action='permission_application_update', conditions={'method': ['POST']})
329 388 m.connect('admin_permissions_application', '/permissions/application',
330 389 action='permission_application', conditions={'method': ['GET']})
331 390
332 391 m.connect('admin_permissions_global', '/permissions/global',
333 392 action='permission_global_update', conditions={'method': ['POST']})
334 393 m.connect('admin_permissions_global', '/permissions/global',
335 394 action='permission_global', conditions={'method': ['GET']})
336 395
337 396 m.connect('admin_permissions_object', '/permissions/object',
338 397 action='permission_objects_update', conditions={'method': ['POST']})
339 398 m.connect('admin_permissions_object', '/permissions/object',
340 399 action='permission_objects', conditions={'method': ['GET']})
341 400
342 401 m.connect('admin_permissions_ips', '/permissions/ips',
343 402 action='permission_ips', conditions={'method': ['POST']})
344 403 m.connect('admin_permissions_ips', '/permissions/ips',
345 404 action='permission_ips', conditions={'method': ['GET']})
346 405
347 406 m.connect('admin_permissions_overview', '/permissions/overview',
348 407 action='permission_perms', conditions={'method': ['GET']})
349 408
350 409 # ADMIN DEFAULTS REST ROUTES
351 410 with rmap.submapper(path_prefix=ADMIN_PREFIX,
352 411 controller='admin/defaults') as m:
353 412 m.connect('admin_defaults_repositories', '/defaults/repositories',
354 413 action='update_repository_defaults', conditions={'method': ['POST']})
355 414 m.connect('admin_defaults_repositories', '/defaults/repositories',
356 415 action='index', conditions={'method': ['GET']})
357 416
358 417 # ADMIN DEBUG STYLE ROUTES
359 418 if str2bool(config.get('debug_style')):
360 419 with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style',
361 420 controller='debug_style') as m:
362 421 m.connect('debug_style_home', '',
363 422 action='index', conditions={'method': ['GET']})
364 423 m.connect('debug_style_template', '/t/{t_path}',
365 424 action='template', conditions={'method': ['GET']})
366 425
367 426 # ADMIN SETTINGS ROUTES
368 427 with rmap.submapper(path_prefix=ADMIN_PREFIX,
369 428 controller='admin/settings') as m:
370 429
371 430 # default
372 431 m.connect('admin_settings', '/settings',
373 432 action='settings_global_update',
374 433 conditions={'method': ['POST']})
375 434 m.connect('admin_settings', '/settings',
376 435 action='settings_global', conditions={'method': ['GET']})
377 436
378 437 m.connect('admin_settings_vcs', '/settings/vcs',
379 438 action='settings_vcs_update',
380 439 conditions={'method': ['POST']})
381 440 m.connect('admin_settings_vcs', '/settings/vcs',
382 441 action='settings_vcs',
383 442 conditions={'method': ['GET']})
384 443 m.connect('admin_settings_vcs', '/settings/vcs',
385 444 action='delete_svn_pattern',
386 445 conditions={'method': ['DELETE']})
387 446
388 447 m.connect('admin_settings_mapping', '/settings/mapping',
389 448 action='settings_mapping_update',
390 449 conditions={'method': ['POST']})
391 450 m.connect('admin_settings_mapping', '/settings/mapping',
392 451 action='settings_mapping', conditions={'method': ['GET']})
393 452
394 453 m.connect('admin_settings_global', '/settings/global',
395 454 action='settings_global_update',
396 455 conditions={'method': ['POST']})
397 456 m.connect('admin_settings_global', '/settings/global',
398 457 action='settings_global', conditions={'method': ['GET']})
399 458
400 459 m.connect('admin_settings_visual', '/settings/visual',
401 460 action='settings_visual_update',
402 461 conditions={'method': ['POST']})
403 462 m.connect('admin_settings_visual', '/settings/visual',
404 463 action='settings_visual', conditions={'method': ['GET']})
405 464
406 465 m.connect('admin_settings_issuetracker',
407 466 '/settings/issue-tracker', action='settings_issuetracker',
408 467 conditions={'method': ['GET']})
409 468 m.connect('admin_settings_issuetracker_save',
410 469 '/settings/issue-tracker/save',
411 470 action='settings_issuetracker_save',
412 471 conditions={'method': ['POST']})
413 472 m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
414 473 action='settings_issuetracker_test',
415 474 conditions={'method': ['POST']})
416 475 m.connect('admin_issuetracker_delete',
417 476 '/settings/issue-tracker/delete',
418 477 action='settings_issuetracker_delete',
419 478 conditions={'method': ['DELETE']})
420 479
421 480 m.connect('admin_settings_email', '/settings/email',
422 481 action='settings_email_update',
423 482 conditions={'method': ['POST']})
424 483 m.connect('admin_settings_email', '/settings/email',
425 484 action='settings_email', conditions={'method': ['GET']})
426 485
427 486 m.connect('admin_settings_hooks', '/settings/hooks',
428 487 action='settings_hooks_update',
429 488 conditions={'method': ['POST', 'DELETE']})
430 489 m.connect('admin_settings_hooks', '/settings/hooks',
431 490 action='settings_hooks', conditions={'method': ['GET']})
432 491
433 492 m.connect('admin_settings_search', '/settings/search',
434 493 action='settings_search', conditions={'method': ['GET']})
435 494
436 495 m.connect('admin_settings_system', '/settings/system',
437 496 action='settings_system', conditions={'method': ['GET']})
438 497
439 498 m.connect('admin_settings_system_update', '/settings/system/updates',
440 499 action='settings_system_update', conditions={'method': ['GET']})
441 500
442 501 m.connect('admin_settings_supervisor', '/settings/supervisor',
443 502 action='settings_supervisor', conditions={'method': ['GET']})
444 503 m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
445 504 action='settings_supervisor_log', conditions={'method': ['GET']})
446 505
447 506 m.connect('admin_settings_labs', '/settings/labs',
448 507 action='settings_labs_update',
449 508 conditions={'method': ['POST']})
450 509 m.connect('admin_settings_labs', '/settings/labs',
451 510 action='settings_labs', conditions={'method': ['GET']})
452 511
453 512 m.connect('admin_settings_open_source', '/settings/open_source',
454 513 action='settings_open_source',
455 514 conditions={'method': ['GET']})
456 515
457 516 # ADMIN MY ACCOUNT
458 517 with rmap.submapper(path_prefix=ADMIN_PREFIX,
459 518 controller='admin/my_account') as m:
460 519
461 520 m.connect('my_account', '/my_account',
462 521 action='my_account', conditions={'method': ['GET']})
463 522 m.connect('my_account_edit', '/my_account/edit',
464 523 action='my_account_edit', conditions={'method': ['GET']})
465 524 m.connect('my_account', '/my_account',
466 525 action='my_account_update', conditions={'method': ['POST']})
467 526
468 527 m.connect('my_account_password', '/my_account/password',
469 528 action='my_account_password', conditions={'method': ['GET']})
470 529 m.connect('my_account_password', '/my_account/password',
471 530 action='my_account_password_update', conditions={'method': ['POST']})
472 531
473 532 m.connect('my_account_repos', '/my_account/repos',
474 533 action='my_account_repos', conditions={'method': ['GET']})
475 534
476 535 m.connect('my_account_watched', '/my_account/watched',
477 536 action='my_account_watched', conditions={'method': ['GET']})
478 537
479 538 m.connect('my_account_pullrequests', '/my_account/pull_requests',
480 539 action='my_account_pullrequests', conditions={'method': ['GET']})
481 540
482 541 m.connect('my_account_perms', '/my_account/perms',
483 542 action='my_account_perms', conditions={'method': ['GET']})
484 543
485 544 m.connect('my_account_emails', '/my_account/emails',
486 545 action='my_account_emails', conditions={'method': ['GET']})
487 546 m.connect('my_account_emails', '/my_account/emails',
488 547 action='my_account_emails_add', conditions={'method': ['POST']})
489 548 m.connect('my_account_emails', '/my_account/emails',
490 549 action='my_account_emails_delete', conditions={'method': ['DELETE']})
491 550
492 551 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
493 552 action='my_account_auth_tokens', conditions={'method': ['GET']})
494 553 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
495 554 action='my_account_auth_tokens_add', conditions={'method': ['POST']})
496 555 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
497 556 action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']})
498 557
499 m.connect('my_account_oauth', '/my_account/oauth',
500 action='my_account_oauth', conditions={'method': ['GET']})
501 m.connect('my_account_oauth', '/my_account/oauth',
502 action='my_account_oauth_delete',
503 conditions={'method': ['DELETE']})
504
505 558 # NOTIFICATION REST ROUTES
506 559 with rmap.submapper(path_prefix=ADMIN_PREFIX,
507 560 controller='admin/notifications') as m:
508 561 m.connect('notifications', '/notifications',
509 562 action='index', conditions={'method': ['GET']})
510 563 m.connect('notifications_mark_all_read', '/notifications/mark_all_read',
511 564 action='mark_all_read', conditions={'method': ['POST']})
512 565
513 566 m.connect('/notifications/{notification_id}',
514 567 action='update', conditions={'method': ['PUT']})
515 568 m.connect('/notifications/{notification_id}',
516 569 action='delete', conditions={'method': ['DELETE']})
517 570 m.connect('notification', '/notifications/{notification_id}',
518 571 action='show', conditions={'method': ['GET']})
519 572
520 573 # ADMIN GIST
521 574 with rmap.submapper(path_prefix=ADMIN_PREFIX,
522 575 controller='admin/gists') as m:
523 576 m.connect('gists', '/gists',
524 577 action='create', conditions={'method': ['POST']})
525 m.connect('gists', '/gists',
578 m.connect('gists', '/gists', jsroute=True,
526 579 action='index', conditions={'method': ['GET']})
527 m.connect('new_gist', '/gists/new',
580 m.connect('new_gist', '/gists/new', jsroute=True,
528 581 action='new', conditions={'method': ['GET']})
529 582
530 583 m.connect('/gists/{gist_id}',
531 584 action='delete', conditions={'method': ['DELETE']})
532 585 m.connect('edit_gist', '/gists/{gist_id}/edit',
533 586 action='edit_form', conditions={'method': ['GET']})
534 587 m.connect('edit_gist', '/gists/{gist_id}/edit',
535 588 action='edit', conditions={'method': ['POST']})
536 589 m.connect(
537 590 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision',
538 591 action='check_revision', conditions={'method': ['GET']})
539 592
540 593 m.connect('gist', '/gists/{gist_id}',
541 594 action='show', conditions={'method': ['GET']})
542 595 m.connect('gist_rev', '/gists/{gist_id}/{revision}',
543 596 revision='tip',
544 597 action='show', conditions={'method': ['GET']})
545 598 m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}',
546 599 revision='tip',
547 600 action='show', conditions={'method': ['GET']})
548 601 m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}',
549 602 revision='tip',
550 603 action='show', conditions={'method': ['GET']},
551 604 requirements=URL_NAME_REQUIREMENTS)
552 605
553 606 # ADMIN MAIN PAGES
554 607 with rmap.submapper(path_prefix=ADMIN_PREFIX,
555 608 controller='admin/admin') as m:
556 609 m.connect('admin_home', '', action='index')
557 610 m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
558 611 action='add_repo')
559 612 m.connect(
560 'pull_requests_global', '/pull_requests/{pull_request_id:[0-9]+}',
613 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}',
561 614 action='pull_requests')
615 m.connect(
616 'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}',
617 action='pull_requests')
618
562 619
563 620 # USER JOURNAL
564 621 rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,),
565 622 controller='journal', action='index')
566 623 rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,),
567 624 controller='journal', action='journal_rss')
568 625 rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,),
569 626 controller='journal', action='journal_atom')
570 627
571 628 rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,),
572 629 controller='journal', action='public_journal')
573 630
574 631 rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,),
575 632 controller='journal', action='public_journal_rss')
576 633
577 634 rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,),
578 635 controller='journal', action='public_journal_rss')
579 636
580 637 rmap.connect('public_journal_atom',
581 638 '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal',
582 639 action='public_journal_atom')
583 640
584 641 rmap.connect('public_journal_atom_old',
585 642 '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal',
586 643 action='public_journal_atom')
587 644
588 645 rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,),
589 controller='journal', action='toggle_following',
646 controller='journal', action='toggle_following', jsroute=True,
590 647 conditions={'method': ['POST']})
591 648
592 649 # FULL TEXT SEARCH
593 650 rmap.connect('search', '%s/search' % (ADMIN_PREFIX,),
594 651 controller='search')
595 652 rmap.connect('search_repo_home', '/{repo_name}/search',
596 653 controller='search',
597 654 action='index',
598 655 conditions={'function': check_repo},
599 656 requirements=URL_NAME_REQUIREMENTS)
600 657
601 # LOGIN/LOGOUT/REGISTER/SIGN IN
602 rmap.connect('login_home', '%s/login' % (ADMIN_PREFIX,), controller='login',
603 action='index')
604
605 rmap.connect('logout_home', '%s/logout' % (ADMIN_PREFIX,), controller='login',
606 action='logout', conditions={'method': ['POST']})
607
608 rmap.connect('register', '%s/register' % (ADMIN_PREFIX,), controller='login',
609 action='register')
610
611 rmap.connect('reset_password', '%s/password_reset' % (ADMIN_PREFIX,),
612 controller='login', action='password_reset')
613
614 rmap.connect('reset_password_confirmation',
615 '%s/password_reset_confirmation' % (ADMIN_PREFIX,),
616 controller='login', action='password_reset_confirmation')
617
618 rmap.connect('social_auth',
619 '%s/social_auth/{provider_name}' % (ADMIN_PREFIX,),
620 controller='login', action='social_auth')
621
622 658 # FEEDS
623 659 rmap.connect('rss_feed_home', '/{repo_name}/feed/rss',
624 660 controller='feed', action='rss',
625 661 conditions={'function': check_repo},
626 662 requirements=URL_NAME_REQUIREMENTS)
627 663
628 664 rmap.connect('atom_feed_home', '/{repo_name}/feed/atom',
629 665 controller='feed', action='atom',
630 666 conditions={'function': check_repo},
631 667 requirements=URL_NAME_REQUIREMENTS)
632 668
633 669 #==========================================================================
634 670 # REPOSITORY ROUTES
635 671 #==========================================================================
636 672
637 673 rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
638 674 controller='admin/repos', action='repo_creating',
639 675 requirements=URL_NAME_REQUIREMENTS)
640 676 rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
641 677 controller='admin/repos', action='repo_check',
642 678 requirements=URL_NAME_REQUIREMENTS)
643 679
644 680 rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}',
645 681 controller='summary', action='repo_stats',
646 682 conditions={'function': check_repo},
647 requirements=URL_NAME_REQUIREMENTS)
683 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
648 684
649 685 rmap.connect('repo_refs_data', '/{repo_name}/refs-data',
650 controller='summary', action='repo_refs_data',
686 controller='summary', action='repo_refs_data', jsroute=True,
651 687 requirements=URL_NAME_REQUIREMENTS)
652 688 rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog',
653 689 controller='summary', action='repo_refs_changelog_data',
654 requirements=URL_NAME_REQUIREMENTS)
690 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
655 691
656 692 rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}',
657 controller='changeset', revision='tip',
693 controller='changeset', revision='tip', jsroute=True,
658 694 conditions={'function': check_repo},
659 695 requirements=URL_NAME_REQUIREMENTS)
660 696 rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}',
661 697 controller='changeset', revision='tip', action='changeset_children',
662 698 conditions={'function': check_repo},
663 699 requirements=URL_NAME_REQUIREMENTS)
664 700 rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}',
665 701 controller='changeset', revision='tip', action='changeset_parents',
666 702 conditions={'function': check_repo},
667 703 requirements=URL_NAME_REQUIREMENTS)
668 704
669 705 # repo edit options
670 rmap.connect('edit_repo', '/{repo_name}/settings',
706 rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True,
671 707 controller='admin/repos', action='edit',
672 708 conditions={'method': ['GET'], 'function': check_repo},
673 709 requirements=URL_NAME_REQUIREMENTS)
674 710
675 711 rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions',
712 jsroute=True,
676 713 controller='admin/repos', action='edit_permissions',
677 714 conditions={'method': ['GET'], 'function': check_repo},
678 715 requirements=URL_NAME_REQUIREMENTS)
679 716 rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions',
680 717 controller='admin/repos', action='edit_permissions_update',
681 718 conditions={'method': ['PUT'], 'function': check_repo},
682 719 requirements=URL_NAME_REQUIREMENTS)
683 720
684 721 rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
685 722 controller='admin/repos', action='edit_fields',
686 723 conditions={'method': ['GET'], 'function': check_repo},
687 724 requirements=URL_NAME_REQUIREMENTS)
688 725 rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
689 726 controller='admin/repos', action='create_repo_field',
690 727 conditions={'method': ['PUT'], 'function': check_repo},
691 728 requirements=URL_NAME_REQUIREMENTS)
692 729 rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
693 730 controller='admin/repos', action='delete_repo_field',
694 731 conditions={'method': ['DELETE'], 'function': check_repo},
695 732 requirements=URL_NAME_REQUIREMENTS)
696 733
697 734 rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced',
698 735 controller='admin/repos', action='edit_advanced',
699 736 conditions={'method': ['GET'], 'function': check_repo},
700 737 requirements=URL_NAME_REQUIREMENTS)
701 738
702 739 rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking',
703 740 controller='admin/repos', action='edit_advanced_locking',
704 741 conditions={'method': ['PUT'], 'function': check_repo},
705 742 requirements=URL_NAME_REQUIREMENTS)
706 743 rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
707 744 controller='admin/repos', action='toggle_locking',
708 745 conditions={'method': ['GET'], 'function': check_repo},
709 746 requirements=URL_NAME_REQUIREMENTS)
710 747
711 748 rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal',
712 749 controller='admin/repos', action='edit_advanced_journal',
713 750 conditions={'method': ['PUT'], 'function': check_repo},
714 751 requirements=URL_NAME_REQUIREMENTS)
715 752
716 753 rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork',
717 754 controller='admin/repos', action='edit_advanced_fork',
718 755 conditions={'method': ['PUT'], 'function': check_repo},
719 756 requirements=URL_NAME_REQUIREMENTS)
720 757
721 758 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
722 759 controller='admin/repos', action='edit_caches_form',
723 760 conditions={'method': ['GET'], 'function': check_repo},
724 761 requirements=URL_NAME_REQUIREMENTS)
725 762 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
726 763 controller='admin/repos', action='edit_caches',
727 764 conditions={'method': ['PUT'], 'function': check_repo},
728 765 requirements=URL_NAME_REQUIREMENTS)
729 766
730 767 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
731 768 controller='admin/repos', action='edit_remote_form',
732 769 conditions={'method': ['GET'], 'function': check_repo},
733 770 requirements=URL_NAME_REQUIREMENTS)
734 771 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
735 772 controller='admin/repos', action='edit_remote',
736 773 conditions={'method': ['PUT'], 'function': check_repo},
737 774 requirements=URL_NAME_REQUIREMENTS)
738 775
739 776 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
740 777 controller='admin/repos', action='edit_statistics_form',
741 778 conditions={'method': ['GET'], 'function': check_repo},
742 779 requirements=URL_NAME_REQUIREMENTS)
743 780 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
744 781 controller='admin/repos', action='edit_statistics',
745 782 conditions={'method': ['PUT'], 'function': check_repo},
746 783 requirements=URL_NAME_REQUIREMENTS)
747 784 rmap.connect('repo_settings_issuetracker',
748 785 '/{repo_name}/settings/issue-tracker',
749 786 controller='admin/repos', action='repo_issuetracker',
750 787 conditions={'method': ['GET'], 'function': check_repo},
751 788 requirements=URL_NAME_REQUIREMENTS)
752 789 rmap.connect('repo_issuetracker_test',
753 790 '/{repo_name}/settings/issue-tracker/test',
754 791 controller='admin/repos', action='repo_issuetracker_test',
755 792 conditions={'method': ['POST'], 'function': check_repo},
756 793 requirements=URL_NAME_REQUIREMENTS)
757 794 rmap.connect('repo_issuetracker_delete',
758 795 '/{repo_name}/settings/issue-tracker/delete',
759 796 controller='admin/repos', action='repo_issuetracker_delete',
760 797 conditions={'method': ['DELETE'], 'function': check_repo},
761 798 requirements=URL_NAME_REQUIREMENTS)
762 799 rmap.connect('repo_issuetracker_save',
763 800 '/{repo_name}/settings/issue-tracker/save',
764 801 controller='admin/repos', action='repo_issuetracker_save',
765 802 conditions={'method': ['POST'], 'function': check_repo},
766 803 requirements=URL_NAME_REQUIREMENTS)
767 804 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
768 805 controller='admin/repos', action='repo_settings_vcs_update',
769 806 conditions={'method': ['POST'], 'function': check_repo},
770 807 requirements=URL_NAME_REQUIREMENTS)
771 808 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
772 809 controller='admin/repos', action='repo_settings_vcs',
773 810 conditions={'method': ['GET'], 'function': check_repo},
774 811 requirements=URL_NAME_REQUIREMENTS)
775 812 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
776 813 controller='admin/repos', action='repo_delete_svn_pattern',
777 814 conditions={'method': ['DELETE'], 'function': check_repo},
778 815 requirements=URL_NAME_REQUIREMENTS)
779 816
780 817 # still working url for backward compat.
781 818 rmap.connect('raw_changeset_home_depraced',
782 819 '/{repo_name}/raw-changeset/{revision}',
783 820 controller='changeset', action='changeset_raw',
784 821 revision='tip', conditions={'function': check_repo},
785 822 requirements=URL_NAME_REQUIREMENTS)
786 823
787 824 # new URLs
788 825 rmap.connect('changeset_raw_home',
789 826 '/{repo_name}/changeset-diff/{revision}',
790 827 controller='changeset', action='changeset_raw',
791 828 revision='tip', conditions={'function': check_repo},
792 829 requirements=URL_NAME_REQUIREMENTS)
793 830
794 831 rmap.connect('changeset_patch_home',
795 832 '/{repo_name}/changeset-patch/{revision}',
796 833 controller='changeset', action='changeset_patch',
797 834 revision='tip', conditions={'function': check_repo},
798 835 requirements=URL_NAME_REQUIREMENTS)
799 836
800 837 rmap.connect('changeset_download_home',
801 838 '/{repo_name}/changeset-download/{revision}',
802 839 controller='changeset', action='changeset_download',
803 840 revision='tip', conditions={'function': check_repo},
804 841 requirements=URL_NAME_REQUIREMENTS)
805 842
806 843 rmap.connect('changeset_comment',
807 '/{repo_name}/changeset/{revision}/comment',
844 '/{repo_name}/changeset/{revision}/comment', jsroute=True,
808 845 controller='changeset', revision='tip', action='comment',
809 846 conditions={'function': check_repo},
810 847 requirements=URL_NAME_REQUIREMENTS)
811 848
812 849 rmap.connect('changeset_comment_preview',
813 '/{repo_name}/changeset/comment/preview',
850 '/{repo_name}/changeset/comment/preview', jsroute=True,
814 851 controller='changeset', action='preview_comment',
815 852 conditions={'function': check_repo, 'method': ['POST']},
816 853 requirements=URL_NAME_REQUIREMENTS)
817 854
818 855 rmap.connect('changeset_comment_delete',
819 856 '/{repo_name}/changeset/comment/{comment_id}/delete',
820 857 controller='changeset', action='delete_comment',
821 858 conditions={'function': check_repo, 'method': ['DELETE']},
822 requirements=URL_NAME_REQUIREMENTS)
859 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
823 860
824 861 rmap.connect('changeset_info', '/changeset_info/{repo_name}/{revision}',
825 862 controller='changeset', action='changeset_info',
826 requirements=URL_NAME_REQUIREMENTS)
863 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
827 864
828 865 rmap.connect('compare_home',
829 866 '/{repo_name}/compare',
830 867 controller='compare', action='index',
831 868 conditions={'function': check_repo},
832 869 requirements=URL_NAME_REQUIREMENTS)
833 870
834 871 rmap.connect('compare_url',
835 872 '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
836 873 controller='compare', action='compare',
837 874 conditions={'function': check_repo},
838 requirements=URL_NAME_REQUIREMENTS)
875 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
839 876
840 877 rmap.connect('pullrequest_home',
841 878 '/{repo_name}/pull-request/new', controller='pullrequests',
842 879 action='index', conditions={'function': check_repo,
843 880 'method': ['GET']},
844 requirements=URL_NAME_REQUIREMENTS)
881 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
845 882
846 883 rmap.connect('pullrequest',
847 884 '/{repo_name}/pull-request/new', controller='pullrequests',
848 885 action='create', conditions={'function': check_repo,
849 886 'method': ['POST']},
850 requirements=URL_NAME_REQUIREMENTS)
887 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
851 888
852 889 rmap.connect('pullrequest_repo_refs',
853 890 '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
854 891 controller='pullrequests',
855 892 action='get_repo_refs',
856 893 conditions={'function': check_repo, 'method': ['GET']},
857 requirements=URL_NAME_REQUIREMENTS)
894 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
858 895
859 896 rmap.connect('pullrequest_repo_destinations',
860 897 '/{repo_name}/pull-request/repo-destinations',
861 898 controller='pullrequests',
862 899 action='get_repo_destinations',
863 900 conditions={'function': check_repo, 'method': ['GET']},
864 requirements=URL_NAME_REQUIREMENTS)
901 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
865 902
866 903 rmap.connect('pullrequest_show',
867 904 '/{repo_name}/pull-request/{pull_request_id}',
868 905 controller='pullrequests',
869 906 action='show', conditions={'function': check_repo,
870 907 'method': ['GET']},
871 908 requirements=URL_NAME_REQUIREMENTS)
872 909
873 910 rmap.connect('pullrequest_update',
874 911 '/{repo_name}/pull-request/{pull_request_id}',
875 912 controller='pullrequests',
876 913 action='update', conditions={'function': check_repo,
877 914 'method': ['PUT']},
878 requirements=URL_NAME_REQUIREMENTS)
915 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
879 916
880 917 rmap.connect('pullrequest_merge',
881 918 '/{repo_name}/pull-request/{pull_request_id}',
882 919 controller='pullrequests',
883 920 action='merge', conditions={'function': check_repo,
884 921 'method': ['POST']},
885 922 requirements=URL_NAME_REQUIREMENTS)
886 923
887 924 rmap.connect('pullrequest_delete',
888 925 '/{repo_name}/pull-request/{pull_request_id}',
889 926 controller='pullrequests',
890 927 action='delete', conditions={'function': check_repo,
891 928 'method': ['DELETE']},
892 929 requirements=URL_NAME_REQUIREMENTS)
893 930
894 931 rmap.connect('pullrequest_show_all',
895 932 '/{repo_name}/pull-request',
896 933 controller='pullrequests',
897 934 action='show_all', conditions={'function': check_repo,
898 935 'method': ['GET']},
899 requirements=URL_NAME_REQUIREMENTS)
936 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
900 937
901 938 rmap.connect('pullrequest_comment',
902 939 '/{repo_name}/pull-request-comment/{pull_request_id}',
903 940 controller='pullrequests',
904 941 action='comment', conditions={'function': check_repo,
905 942 'method': ['POST']},
906 requirements=URL_NAME_REQUIREMENTS)
943 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
907 944
908 945 rmap.connect('pullrequest_comment_delete',
909 946 '/{repo_name}/pull-request-comment/{comment_id}/delete',
910 947 controller='pullrequests', action='delete_comment',
911 948 conditions={'function': check_repo, 'method': ['DELETE']},
912 requirements=URL_NAME_REQUIREMENTS)
949 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
913 950
914 951 rmap.connect('summary_home_explicit', '/{repo_name}/summary',
915 952 controller='summary', conditions={'function': check_repo},
916 953 requirements=URL_NAME_REQUIREMENTS)
917 954
918 955 rmap.connect('branches_home', '/{repo_name}/branches',
919 956 controller='branches', conditions={'function': check_repo},
920 957 requirements=URL_NAME_REQUIREMENTS)
921 958
922 959 rmap.connect('tags_home', '/{repo_name}/tags',
923 960 controller='tags', conditions={'function': check_repo},
924 961 requirements=URL_NAME_REQUIREMENTS)
925 962
926 963 rmap.connect('bookmarks_home', '/{repo_name}/bookmarks',
927 964 controller='bookmarks', conditions={'function': check_repo},
928 965 requirements=URL_NAME_REQUIREMENTS)
929 966
930 rmap.connect('changelog_home', '/{repo_name}/changelog',
967 rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True,
931 968 controller='changelog', conditions={'function': check_repo},
932 969 requirements=URL_NAME_REQUIREMENTS)
933 970
934 971 rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary',
935 972 controller='changelog', action='changelog_summary',
936 973 conditions={'function': check_repo},
937 974 requirements=URL_NAME_REQUIREMENTS)
938 975
939 rmap.connect('changelog_file_home', '/{repo_name}/changelog/{revision}/{f_path}',
976 rmap.connect('changelog_file_home',
977 '/{repo_name}/changelog/{revision}/{f_path}',
940 978 controller='changelog', f_path=None,
941 979 conditions={'function': check_repo},
942 requirements=URL_NAME_REQUIREMENTS)
980 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
943 981
944 982 rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}',
945 983 controller='changelog', action='changelog_details',
946 984 conditions={'function': check_repo},
947 985 requirements=URL_NAME_REQUIREMENTS)
948 986
949 rmap.connect('files_home',
950 '/{repo_name}/files/{revision}/{f_path}',
987 rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}',
951 988 controller='files', revision='tip', f_path='',
952 989 conditions={'function': check_repo},
953 requirements=URL_NAME_REQUIREMENTS)
990 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
954 991
955 992 rmap.connect('files_home_simple_catchrev',
956 993 '/{repo_name}/files/{revision}',
957 994 controller='files', revision='tip', f_path='',
958 995 conditions={'function': check_repo},
959 996 requirements=URL_NAME_REQUIREMENTS)
960 997
961 998 rmap.connect('files_home_simple_catchall',
962 999 '/{repo_name}/files',
963 1000 controller='files', revision='tip', f_path='',
964 1001 conditions={'function': check_repo},
965 1002 requirements=URL_NAME_REQUIREMENTS)
966 1003
967 1004 rmap.connect('files_history_home',
968 1005 '/{repo_name}/history/{revision}/{f_path}',
969 1006 controller='files', action='history', revision='tip', f_path='',
970 1007 conditions={'function': check_repo},
971 requirements=URL_NAME_REQUIREMENTS)
1008 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
972 1009
973 1010 rmap.connect('files_authors_home',
974 1011 '/{repo_name}/authors/{revision}/{f_path}',
975 1012 controller='files', action='authors', revision='tip', f_path='',
976 1013 conditions={'function': check_repo},
977 requirements=URL_NAME_REQUIREMENTS)
1014 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
978 1015
979 1016 rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}',
980 1017 controller='files', action='diff', f_path='',
981 1018 conditions={'function': check_repo},
982 1019 requirements=URL_NAME_REQUIREMENTS)
983 1020
984 1021 rmap.connect('files_diff_2way_home',
985 1022 '/{repo_name}/diff-2way/{f_path}',
986 1023 controller='files', action='diff_2way', f_path='',
987 1024 conditions={'function': check_repo},
988 1025 requirements=URL_NAME_REQUIREMENTS)
989 1026
990 1027 rmap.connect('files_rawfile_home',
991 1028 '/{repo_name}/rawfile/{revision}/{f_path}',
992 1029 controller='files', action='rawfile', revision='tip',
993 1030 f_path='', conditions={'function': check_repo},
994 1031 requirements=URL_NAME_REQUIREMENTS)
995 1032
996 1033 rmap.connect('files_raw_home',
997 1034 '/{repo_name}/raw/{revision}/{f_path}',
998 1035 controller='files', action='raw', revision='tip', f_path='',
999 1036 conditions={'function': check_repo},
1000 1037 requirements=URL_NAME_REQUIREMENTS)
1001 1038
1002 1039 rmap.connect('files_render_home',
1003 1040 '/{repo_name}/render/{revision}/{f_path}',
1004 1041 controller='files', action='index', revision='tip', f_path='',
1005 1042 rendered=True, conditions={'function': check_repo},
1006 1043 requirements=URL_NAME_REQUIREMENTS)
1007 1044
1008 1045 rmap.connect('files_annotate_home',
1009 1046 '/{repo_name}/annotate/{revision}/{f_path}',
1010 1047 controller='files', action='index', revision='tip',
1011 1048 f_path='', annotate=True, conditions={'function': check_repo},
1012 1049 requirements=URL_NAME_REQUIREMENTS)
1013 1050
1014 1051 rmap.connect('files_edit',
1015 1052 '/{repo_name}/edit/{revision}/{f_path}',
1016 1053 controller='files', action='edit', revision='tip',
1017 1054 f_path='',
1018 1055 conditions={'function': check_repo, 'method': ['POST']},
1019 1056 requirements=URL_NAME_REQUIREMENTS)
1020 1057
1021 1058 rmap.connect('files_edit_home',
1022 1059 '/{repo_name}/edit/{revision}/{f_path}',
1023 1060 controller='files', action='edit_home', revision='tip',
1024 1061 f_path='', conditions={'function': check_repo},
1025 1062 requirements=URL_NAME_REQUIREMENTS)
1026 1063
1027 1064 rmap.connect('files_add',
1028 1065 '/{repo_name}/add/{revision}/{f_path}',
1029 1066 controller='files', action='add', revision='tip',
1030 1067 f_path='',
1031 1068 conditions={'function': check_repo, 'method': ['POST']},
1032 1069 requirements=URL_NAME_REQUIREMENTS)
1033 1070
1034 1071 rmap.connect('files_add_home',
1035 1072 '/{repo_name}/add/{revision}/{f_path}',
1036 1073 controller='files', action='add_home', revision='tip',
1037 1074 f_path='', conditions={'function': check_repo},
1038 1075 requirements=URL_NAME_REQUIREMENTS)
1039 1076
1040 1077 rmap.connect('files_delete',
1041 1078 '/{repo_name}/delete/{revision}/{f_path}',
1042 1079 controller='files', action='delete', revision='tip',
1043 1080 f_path='',
1044 1081 conditions={'function': check_repo, 'method': ['POST']},
1045 1082 requirements=URL_NAME_REQUIREMENTS)
1046 1083
1047 1084 rmap.connect('files_delete_home',
1048 1085 '/{repo_name}/delete/{revision}/{f_path}',
1049 1086 controller='files', action='delete_home', revision='tip',
1050 1087 f_path='', conditions={'function': check_repo},
1051 1088 requirements=URL_NAME_REQUIREMENTS)
1052 1089
1053 1090 rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}',
1054 1091 controller='files', action='archivefile',
1055 1092 conditions={'function': check_repo},
1056 requirements=URL_NAME_REQUIREMENTS)
1093 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1057 1094
1058 1095 rmap.connect('files_nodelist_home',
1059 1096 '/{repo_name}/nodelist/{revision}/{f_path}',
1060 1097 controller='files', action='nodelist',
1061 1098 conditions={'function': check_repo},
1062 requirements=URL_NAME_REQUIREMENTS)
1099 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1063 1100
1064 1101 rmap.connect('files_metadata_list_home',
1065 1102 '/{repo_name}/metadata_list/{revision}/{f_path}',
1066 1103 controller='files', action='metadata_list',
1067 1104 conditions={'function': check_repo},
1068 requirements=URL_NAME_REQUIREMENTS)
1105 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1069 1106
1070 1107 rmap.connect('repo_fork_create_home', '/{repo_name}/fork',
1071 1108 controller='forks', action='fork_create',
1072 1109 conditions={'function': check_repo, 'method': ['POST']},
1073 1110 requirements=URL_NAME_REQUIREMENTS)
1074 1111
1075 1112 rmap.connect('repo_fork_home', '/{repo_name}/fork',
1076 1113 controller='forks', action='fork',
1077 1114 conditions={'function': check_repo},
1078 1115 requirements=URL_NAME_REQUIREMENTS)
1079 1116
1080 1117 rmap.connect('repo_forks_home', '/{repo_name}/forks',
1081 1118 controller='forks', action='forks',
1082 1119 conditions={'function': check_repo},
1083 1120 requirements=URL_NAME_REQUIREMENTS)
1084 1121
1085 1122 rmap.connect('repo_followers_home', '/{repo_name}/followers',
1086 1123 controller='followers', action='followers',
1087 1124 conditions={'function': check_repo},
1088 1125 requirements=URL_NAME_REQUIREMENTS)
1089 1126
1090 1127 # must be here for proper group/repo catching pattern
1091 1128 _connect_with_slash(
1092 1129 rmap, 'repo_group_home', '/{group_name}',
1093 1130 controller='home', action='index_repo_group',
1094 1131 conditions={'function': check_group},
1095 1132 requirements=URL_NAME_REQUIREMENTS)
1096 1133
1097 1134 # catch all, at the end
1098 1135 _connect_with_slash(
1099 rmap, 'summary_home', '/{repo_name}',
1136 rmap, 'summary_home', '/{repo_name}', jsroute=True,
1100 1137 controller='summary', action='index',
1101 1138 conditions={'function': check_repo},
1102 1139 requirements=URL_NAME_REQUIREMENTS)
1103 1140
1104 1141 return rmap
1105 1142
1106 1143
1107 1144 def _connect_with_slash(mapper, name, path, *args, **kwargs):
1108 1145 """
1109 1146 Connect a route with an optional trailing slash in `path`.
1110 1147 """
1111 1148 mapper.connect(name + '_slash', path + '/', *args, **kwargs)
1112 1149 mapper.connect(name, path, *args, **kwargs)
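For reference, the `_connect_with_slash` helper above registers every such route twice, so both the bare path and the path with a trailing slash resolve to the same controller. A minimal, self-contained sketch of that effect, using a stand-in recorder object rather than the real routes.Mapper:

class FakeMapper(object):
    # stand-in for routes.Mapper that only records connect() calls
    def __init__(self):
        self.routes = []

    def connect(self, name, path, *args, **kwargs):
        self.routes.append((name, path))


def _connect_with_slash(mapper, name, path, *args, **kwargs):
    # same helper as defined in routing.py above
    mapper.connect(name + '_slash', path + '/', *args, **kwargs)
    mapper.connect(name, path, *args, **kwargs)


m = FakeMapper()
_connect_with_slash(m, 'summary_home', '/{repo_name}')
print(m.routes)
# [('summary_home_slash', '/{repo_name}/'), ('summary_home', '/{repo_name}')]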
@@ -1,87 +1,99 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import shlex
23 23 import Pyro4
24 24 import platform
25 25
26 26 from rhodecode.model import init_model
27 27
28 28
29 29 def configure_pyro4(config):
30 30 """
31 31 Configure Pyro4 based on `config`.
32 32
33 33 This will mainly set the different configuration parameters of the Pyro4
34 34 library based on the settings in our INI files. The Pyro4 documentation
35 35 lists more details about the specific settings and their meaning.
36 36 """
37 37 Pyro4.config.COMMTIMEOUT = float(config['vcs.connection_timeout'])
38 38 Pyro4.config.SERIALIZER = 'pickle'
39 39 Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
40 40
41 41 # Note: We need server configuration in the WSGI processes
42 42 # because we provide a callback server in certain vcs operations.
43 43 Pyro4.config.SERVERTYPE = "multiplex"
44 44 Pyro4.config.POLLTIMEOUT = 0.01
45 45
46 46
47 47 def configure_vcs(config):
48 48 """
49 49 Patch VCS config with some RhodeCode specific stuff
50 50 """
51 51 from rhodecode.lib.vcs import conf
52 52 from rhodecode.lib.utils2 import aslist
53 53 conf.settings.BACKENDS = {
54 54 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
55 55 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
56 56 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
57 57 }
58 58
59 59 conf.settings.HG_USE_REBASE_FOR_MERGING = config.get(
60 60 'rhodecode_hg_use_rebase_for_merging', False)
61 61 conf.settings.GIT_REV_FILTER = shlex.split(
62 62 config.get('git_rev_filter', '--all').strip())
63 63 conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
64 64 'UTF-8'), sep=',')
65 65 conf.settings.ALIASES[:] = config.get('vcs.backends')
66 66 conf.settings.SVN_COMPATIBLE_VERSION = config.get(
67 67 'vcs.svn.compatible_version')
68 68
69 69
70 70 def initialize_database(config):
71 71 from rhodecode.lib.utils2 import engine_from_config
72 72 engine = engine_from_config(config, 'sqlalchemy.db1.')
73 73 init_model(engine, encryption_key=config['beaker.session.secret'])
74 74
75 75
76 def initialize_test_environment(settings, test_env=None):
77 if test_env is None:
78 test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
79
80 from rhodecode.lib.utils import create_test_env, create_test_index
81 from rhodecode.tests import TESTS_TMP_PATH
82 # test repos
83 if test_env:
84 create_test_env(TESTS_TMP_PATH, settings)
85 create_test_index(TESTS_TMP_PATH, settings, True)
86
87
76 88 def get_vcs_server_protocol(config):
77 89 protocol = config.get('vcs.server.protocol', 'pyro4')
78 90 return protocol
79 91
80 92
81 93 def set_instance_id(config):
82 94 """ Sets a dynamic generated config['instance_id'] if missing or '*' """
83 95
84 96 config['instance_id'] = config.get('instance_id') or ''
85 97 if config['instance_id'] == '*' or not config['instance_id']:
86 98 _platform_id = platform.uname()[1] or 'instance'
87 99 config['instance_id'] = '%s-%s' % (_platform_id, os.getpid())
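As a quick illustration of `set_instance_id` above: an empty or wildcard `instance_id` is replaced by a hostname/PID pair, while an explicit value is left untouched. The helper is repeated here so the snippet runs standalone; the printed value is machine-dependent.

import os
import platform


def set_instance_id(config):
    # copy of the helper shown above
    config['instance_id'] = config.get('instance_id') or ''
    if config['instance_id'] == '*' or not config['instance_id']:
        _platform_id = platform.uname()[1] or 'instance'
        config['instance_id'] = '%s-%s' % (_platform_id, os.getpid())


config = {'instance_id': '*'}
set_instance_id(config)
print(config['instance_id'])  # e.g. 'myhost-12345'

config = {'instance_id': 'prod-1'}
set_instance_id(config)
print(config['instance_id'])  # explicit value 'prod-1' is kept as-is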
@@ -1,373 +1,348 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2013-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 my account controller for RhodeCode admin
24 24 """
25 25
26 26 import logging
27 27
28 28 import formencode
29 29 from formencode import htmlfill
30 30 from pylons import request, tmpl_context as c, url, session
31 31 from pylons.controllers.util import redirect
32 32 from pylons.i18n.translation import _
33 33 from sqlalchemy.orm import joinedload
34 34
35 35 from rhodecode.lib import helpers as h
36 36 from rhodecode.lib import auth
37 37 from rhodecode.lib.auth import (
38 38 LoginRequired, NotAnonymous, AuthUser, generate_auth_token)
39 39 from rhodecode.lib.base import BaseController, render
40 40 from rhodecode.lib.utils2 import safe_int, md5
41 41 from rhodecode.lib.ext_json import json
42 from rhodecode.model.db import (Repository, PullRequest, PullRequestReviewers,
43 UserEmailMap, User, UserFollowing,
44 ExternalIdentity)
42 from rhodecode.model.db import (
43 Repository, PullRequest, PullRequestReviewers, UserEmailMap, User,
44 UserFollowing)
45 45 from rhodecode.model.forms import UserForm, PasswordChangeForm
46 46 from rhodecode.model.scm import RepoList
47 47 from rhodecode.model.user import UserModel
48 48 from rhodecode.model.repo import RepoModel
49 49 from rhodecode.model.auth_token import AuthTokenModel
50 50 from rhodecode.model.meta import Session
51 from rhodecode.model.settings import SettingsModel
52 51
53 52 log = logging.getLogger(__name__)
54 53
55 54
56 55 class MyAccountController(BaseController):
57 56 """REST Controller styled on the Atom Publishing Protocol"""
58 57 # To properly map this controller, ensure your config/routing.py
59 58 # file has a resource setup:
60 59 # map.resource('setting', 'settings', controller='admin/settings',
61 60 # path_prefix='/admin', name_prefix='admin_')
62 61
63 62 @LoginRequired()
64 63 @NotAnonymous()
65 64 def __before__(self):
66 65 super(MyAccountController, self).__before__()
67 66
68 67 def __load_data(self):
69 68 c.user = User.get(c.rhodecode_user.user_id)
70 69 if c.user.username == User.DEFAULT_USER:
71 70 h.flash(_("You can't edit this user since it's"
72 71 " crucial for entire application"), category='warning')
73 72 return redirect(url('users'))
74 73
75 74 def _load_my_repos_data(self, watched=False):
76 75 if watched:
77 76 admin = False
78 77 follows_repos = Session().query(UserFollowing)\
79 78 .filter(UserFollowing.user_id == c.rhodecode_user.user_id)\
80 79 .options(joinedload(UserFollowing.follows_repository))\
81 80 .all()
82 81 repo_list = [x.follows_repository for x in follows_repos]
83 82 else:
84 83 admin = True
85 84 repo_list = Repository.get_all_repos(
86 85 user_id=c.rhodecode_user.user_id)
87 86 repo_list = RepoList(repo_list, perm_set=[
88 87 'repository.read', 'repository.write', 'repository.admin'])
89 88
90 89 repos_data = RepoModel().get_repos_as_dict(
91 90 repo_list=repo_list, admin=admin)
92 91 # json used to render the grid
93 92 return json.dumps(repos_data)
94 93
95 94 @auth.CSRFRequired()
96 95 def my_account_update(self):
97 96 """
98 97 POST /_admin/my_account Updates info of my account
99 98 """
100 99 # url('my_account')
101 100 c.active = 'profile_edit'
102 101 self.__load_data()
103 102 c.perm_user = AuthUser(user_id=c.rhodecode_user.user_id,
104 103 ip_addr=self.ip_addr)
105 104 c.extern_type = c.user.extern_type
106 105 c.extern_name = c.user.extern_name
107 106
108 107 defaults = c.user.get_dict()
109 108 update = False
110 109 _form = UserForm(edit=True,
111 110 old_data={'user_id': c.rhodecode_user.user_id,
112 111 'email': c.rhodecode_user.email})()
113 112 form_result = {}
114 113 try:
115 114 post_data = dict(request.POST)
116 115 post_data['new_password'] = ''
117 116 post_data['password_confirmation'] = ''
118 117 form_result = _form.to_python(post_data)
119 118 # skip updating those attrs for my account
120 119 skip_attrs = ['admin', 'active', 'extern_type', 'extern_name',
121 120 'new_password', 'password_confirmation']
122 121 # TODO: plugin should define if username can be updated
123 122 if c.extern_type != "rhodecode":
124 123 # forbid updating username for external accounts
125 124 skip_attrs.append('username')
126 125
127 126 UserModel().update_user(
128 127 c.rhodecode_user.user_id, skip_attrs=skip_attrs, **form_result)
129 128 h.flash(_('Your account was updated successfully'),
130 129 category='success')
131 130 Session().commit()
132 131 update = True
133 132
134 133 except formencode.Invalid as errors:
135 134 return htmlfill.render(
136 135 render('admin/my_account/my_account.html'),
137 136 defaults=errors.value,
138 137 errors=errors.error_dict or {},
139 138 prefix_error=False,
140 139 encoding="UTF-8",
141 140 force_defaults=False)
142 141 except Exception:
143 142 log.exception("Exception updating user")
144 143 h.flash(_('Error occurred during update of user %s')
145 144 % form_result.get('username'), category='error')
146 145
147 146 if update:
148 147 return redirect('my_account')
149 148
150 149 return htmlfill.render(
151 150 render('admin/my_account/my_account.html'),
152 151 defaults=defaults,
153 152 encoding="UTF-8",
154 153 force_defaults=False
155 154 )
156 155
157 156 def my_account(self):
158 157 """
159 158 GET /_admin/my_account Displays info about my account
160 159 """
161 160 # url('my_account')
162 161 c.active = 'profile'
163 162 self.__load_data()
164 163
165 164 defaults = c.user.get_dict()
166 165 return htmlfill.render(
167 166 render('admin/my_account/my_account.html'),
168 167 defaults=defaults, encoding="UTF-8", force_defaults=False)
169 168
170 169 def my_account_edit(self):
171 170 """
172 171 GET /_admin/my_account/edit Displays edit form of my account
173 172 """
174 173 c.active = 'profile_edit'
175 174 self.__load_data()
176 175 c.perm_user = AuthUser(user_id=c.rhodecode_user.user_id,
177 176 ip_addr=self.ip_addr)
178 177 c.extern_type = c.user.extern_type
179 178 c.extern_name = c.user.extern_name
180 179
181 180 defaults = c.user.get_dict()
182 181 return htmlfill.render(
183 182 render('admin/my_account/my_account.html'),
184 183 defaults=defaults,
185 184 encoding="UTF-8",
186 185 force_defaults=False
187 186 )
188 187
189 188 @auth.CSRFRequired()
190 189 def my_account_password_update(self):
191 190 c.active = 'password'
192 191 self.__load_data()
193 192 _form = PasswordChangeForm(c.rhodecode_user.username)()
194 193 try:
195 194 form_result = _form.to_python(request.POST)
196 195 UserModel().update_user(c.rhodecode_user.user_id, **form_result)
197 196 instance = c.rhodecode_user.get_instance()
198 197 instance.update_userdata(force_password_change=False)
199 198 Session().commit()
200 199 session.setdefault('rhodecode_user', {}).update(
201 200 {'password': md5(instance.password)})
202 201 session.save()
203 202 h.flash(_("Successfully updated password"), category='success')
204 203 except formencode.Invalid as errors:
205 204 return htmlfill.render(
206 205 render('admin/my_account/my_account.html'),
207 206 defaults=errors.value,
208 207 errors=errors.error_dict or {},
209 208 prefix_error=False,
210 209 encoding="UTF-8",
211 210 force_defaults=False)
212 211 except Exception:
213 212 log.exception("Exception updating password")
214 213 h.flash(_('Error occurred during update of user password'),
215 214 category='error')
216 215 return render('admin/my_account/my_account.html')
217 216
218 217 def my_account_password(self):
219 218 c.active = 'password'
220 219 self.__load_data()
221 220 return render('admin/my_account/my_account.html')
222 221
223 222 def my_account_repos(self):
224 223 c.active = 'repos'
225 224 self.__load_data()
226 225
227 226 # json used to render the grid
228 227 c.data = self._load_my_repos_data()
229 228 return render('admin/my_account/my_account.html')
230 229
231 230 def my_account_watched(self):
232 231 c.active = 'watched'
233 232 self.__load_data()
234 233
235 234 # json used to render the grid
236 235 c.data = self._load_my_repos_data(watched=True)
237 236 return render('admin/my_account/my_account.html')
238 237
239 238 def my_account_perms(self):
240 239 c.active = 'perms'
241 240 self.__load_data()
242 241 c.perm_user = AuthUser(user_id=c.rhodecode_user.user_id,
243 242 ip_addr=self.ip_addr)
244 243
245 244 return render('admin/my_account/my_account.html')
246 245
247 246 def my_account_emails(self):
248 247 c.active = 'emails'
249 248 self.__load_data()
250 249
251 250 c.user_email_map = UserEmailMap.query()\
252 251 .filter(UserEmailMap.user == c.user).all()
253 252 return render('admin/my_account/my_account.html')
254 253
255 254 @auth.CSRFRequired()
256 255 def my_account_emails_add(self):
257 256 email = request.POST.get('new_email')
258 257
259 258 try:
260 259 UserModel().add_extra_email(c.rhodecode_user.user_id, email)
261 260 Session().commit()
262 261 h.flash(_("Added new email address `%s` for user account") % email,
263 262 category='success')
264 263 except formencode.Invalid as error:
265 264 msg = error.error_dict['email']
266 265 h.flash(msg, category='error')
267 266 except Exception:
268 267 log.exception("Exception in my_account_emails")
269 268 h.flash(_('An error occurred during email saving'),
270 269 category='error')
271 270 return redirect(url('my_account_emails'))
272 271
273 272 @auth.CSRFRequired()
274 273 def my_account_emails_delete(self):
275 274 email_id = request.POST.get('del_email_id')
276 275 user_model = UserModel()
277 276 user_model.delete_extra_email(c.rhodecode_user.user_id, email_id)
278 277 Session().commit()
279 278 h.flash(_("Removed email address from user account"),
280 279 category='success')
281 280 return redirect(url('my_account_emails'))
282 281
283 282 def my_account_pullrequests(self):
284 283 c.active = 'pullrequests'
285 284 self.__load_data()
286 285 c.show_closed = request.GET.get('pr_show_closed')
287 286
288 287 def _filter(pr):
289 288 s = sorted(pr, key=lambda o: o.created_on, reverse=True)
290 289 if not c.show_closed:
291 290 s = filter(lambda p: p.status != PullRequest.STATUS_CLOSED, s)
292 291 return s
293 292
294 293 c.my_pull_requests = _filter(
295 294 PullRequest.query().filter(
296 295 PullRequest.user_id == c.rhodecode_user.user_id).all())
297 296 my_prs = [
298 297 x.pull_request for x in PullRequestReviewers.query().filter(
299 298 PullRequestReviewers.user_id == c.rhodecode_user.user_id).all()]
300 299 c.participate_in_pull_requests = _filter(my_prs)
301 300 return render('admin/my_account/my_account.html')
302 301
303 302 def my_account_auth_tokens(self):
304 303 c.active = 'auth_tokens'
305 304 self.__load_data()
306 305 show_expired = True
307 306 c.lifetime_values = [
308 307 (str(-1), _('forever')),
309 308 (str(5), _('5 minutes')),
310 309 (str(60), _('1 hour')),
311 310 (str(60 * 24), _('1 day')),
312 311 (str(60 * 24 * 30), _('1 month')),
313 312 ]
314 313 c.lifetime_options = [(c.lifetime_values, _("Lifetime"))]
315 314 c.role_values = [(x, AuthTokenModel.cls._get_role_name(x))
316 315 for x in AuthTokenModel.cls.ROLES]
317 316 c.role_options = [(c.role_values, _("Role"))]
318 317 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
319 318 c.rhodecode_user.user_id, show_expired=show_expired)
320 319 return render('admin/my_account/my_account.html')
321 320
322 321 @auth.CSRFRequired()
323 322 def my_account_auth_tokens_add(self):
324 323 lifetime = safe_int(request.POST.get('lifetime'), -1)
325 324 description = request.POST.get('description')
326 325 role = request.POST.get('role')
327 326 AuthTokenModel().create(c.rhodecode_user.user_id, description, lifetime,
328 327 role)
329 328 Session().commit()
330 329 h.flash(_("Auth token successfully created"), category='success')
331 330 return redirect(url('my_account_auth_tokens'))
332 331
333 332 @auth.CSRFRequired()
334 333 def my_account_auth_tokens_delete(self):
335 334 auth_token = request.POST.get('del_auth_token')
336 335 user_id = c.rhodecode_user.user_id
337 336 if request.POST.get('del_auth_token_builtin'):
338 337 user = User.get(user_id)
339 338 if user:
340 339 user.api_key = generate_auth_token(user.username)
341 340 Session().add(user)
342 341 Session().commit()
343 342 h.flash(_("Auth token successfully reset"), category='success')
344 343 elif auth_token:
345 344 AuthTokenModel().delete(auth_token, c.rhodecode_user.user_id)
346 345 Session().commit()
347 346 h.flash(_("Auth token successfully deleted"), category='success')
348 347
349 348 return redirect(url('my_account_auth_tokens'))
350
351 def my_account_oauth(self):
352 c.active = 'oauth'
353 self.__load_data()
354 c.user_oauth_tokens = ExternalIdentity().by_local_user_id(
355 c.rhodecode_user.user_id).all()
356 settings = SettingsModel().get_all_settings()
357 c.social_plugins = SettingsModel().list_enabled_social_plugins(
358 settings)
359 return render('admin/my_account/my_account.html')
360
361 @auth.CSRFRequired()
362 def my_account_oauth_delete(self):
363 token = ExternalIdentity.by_external_id_and_provider(
364 request.params.get('external_id'),
365 request.params.get('provider_name'),
366 local_user_id=c.rhodecode_user.user_id
367 )
368 if token:
369 Session().delete(token)
370 Session().commit()
371 h.flash(_("OAuth token successfully deleted"), category='success')
372
373 return redirect(url('my_account_oauth'))
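The `_filter` closure in `my_account_pullrequests` above sorts pull requests newest-first and, unless `pr_show_closed` is set, drops the closed ones. A self-contained restatement of that logic with simplified stand-in objects (the real code operates on PullRequest rows and PullRequest.STATUS_CLOSED):

from collections import namedtuple

PR = namedtuple('PR', 'pull_request_id status created_on')
STATUS_CLOSED = 'closed'


def _filter(prs, show_closed=False):
    # newest first; optionally keep closed pull requests
    s = sorted(prs, key=lambda o: o.created_on, reverse=True)
    if not show_closed:
        s = [p for p in s if p.status != STATUS_CLOSED]
    return s


prs = [PR(1, 'new', 10), PR(2, STATUS_CLOSED, 30), PR(3, 'new', 20)]
print([p.pull_request_id for p in _filter(prs)])                    # [3, 1]
print([p.pull_request_id for p in _filter(prs, show_closed=True)])  # [2, 3, 1]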
@@ -1,471 +1,480 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 User Groups CRUD controller for Pylons
23 23 """
24 24
25 25 import logging
26 26 import formencode
27 27
28 28 from formencode import htmlfill
29 29 from pylons import request, tmpl_context as c, url, config
30 30 from pylons.controllers.util import redirect
31 31 from pylons.i18n.translation import _
32 32
33 33 from sqlalchemy.orm import joinedload
34 34
35 35 from rhodecode.lib import auth
36 36 from rhodecode.lib import helpers as h
37 37 from rhodecode.lib.exceptions import UserGroupAssignedException,\
38 38 RepoGroupAssignmentError
39 from rhodecode.lib.utils import jsonify, action_logger
39 40 from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int
40 41 from rhodecode.lib.auth import (
41 42 LoginRequired, NotAnonymous, HasUserGroupPermissionAnyDecorator,
42 43 HasPermissionAnyDecorator)
43 44 from rhodecode.lib.base import BaseController, render
44 45 from rhodecode.model.permission import PermissionModel
45 46 from rhodecode.model.scm import UserGroupList
46 47 from rhodecode.model.user_group import UserGroupModel
47 48 from rhodecode.model.db import (
48 49 User, UserGroup, UserGroupRepoToPerm, UserGroupRepoGroupToPerm)
49 50 from rhodecode.model.forms import (
50 51 UserGroupForm, UserGroupPermsForm, UserIndividualPermissionsForm,
51 52 UserPermissionsForm)
52 53 from rhodecode.model.meta import Session
53 54 from rhodecode.lib.utils import action_logger
54 55 from rhodecode.lib.ext_json import json
55 56
56 57 log = logging.getLogger(__name__)
57 58
58 59
59 60 class UserGroupsController(BaseController):
60 61 """REST Controller styled on the Atom Publishing Protocol"""
61 62
62 63 @LoginRequired()
63 64 def __before__(self):
64 65 super(UserGroupsController, self).__before__()
65 66 c.available_permissions = config['available_permissions']
66 67 PermissionModel().set_global_permission_choices(c, translator=_)
67 68
68 69 def __load_data(self, user_group_id):
69 70 c.group_members_obj = [x.user for x in c.user_group.members]
70 71 c.group_members_obj.sort(key=lambda u: u.username.lower())
71 72
72 73 c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
73 74
74 75 c.available_members = [(x.user_id, x.username)
75 76 for x in User.query().all()]
76 77 c.available_members.sort(key=lambda u: u[1].lower())
77 78
78 79 def __load_defaults(self, user_group_id):
79 80 """
80 81 Load default settings for edit and update
81 82
82 83 :param user_group_id:
83 84 """
84 85 user_group = UserGroup.get_or_404(user_group_id)
85 86 data = user_group.get_dict()
86 87 # fill owner
87 88 if user_group.user:
88 89 data.update({'user': user_group.user.username})
89 90 else:
90 91 replacement_user = User.get_first_admin().username
91 92 data.update({'user': replacement_user})
92 93 return data
93 94
94 95 def _revoke_perms_on_yourself(self, form_result):
95 96 _updates = filter(lambda u: c.rhodecode_user.user_id == int(u[0]),
96 97 form_result['perm_updates'])
97 98 _additions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]),
98 99 form_result['perm_additions'])
99 100 _deletions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]),
100 101 form_result['perm_deletions'])
101 102 admin_perm = 'usergroup.admin'
102 103 if _updates and _updates[0][1] != admin_perm or \
103 104 _additions and _additions[0][1] != admin_perm or \
104 105 _deletions and _deletions[0][1] != admin_perm:
105 106 return True
106 107 return False
107 108
108 109 # permission check inside
109 110 @NotAnonymous()
110 111 def index(self):
111 112 """GET /users_groups: All items in the collection"""
112 113 # url('users_groups')
113 114
114 115 from rhodecode.lib.utils import PartialRenderer
115 116 _render = PartialRenderer('data_table/_dt_elements.html')
116 117
117 118 def user_group_name(user_group_id, user_group_name):
118 119 return _render("user_group_name", user_group_id, user_group_name)
119 120
120 121 def user_group_actions(user_group_id, user_group_name):
121 122 return _render("user_group_actions", user_group_id, user_group_name)
122 123
123 124 ## json generate
124 125 group_iter = UserGroupList(UserGroup.query().all(),
125 126 perm_set=['usergroup.admin'])
126 127
127 128 user_groups_data = []
128 129 for user_gr in group_iter:
129 130 user_groups_data.append({
130 131 "group_name": user_group_name(
131 132 user_gr.users_group_id, h.escape(user_gr.users_group_name)),
132 133 "group_name_raw": user_gr.users_group_name,
133 134 "desc": h.escape(user_gr.user_group_description),
134 135 "members": len(user_gr.members),
135 136 "active": h.bool2icon(user_gr.users_group_active),
136 137 "owner": h.escape(h.link_to_user(user_gr.user.username)),
137 138 "action": user_group_actions(
138 139 user_gr.users_group_id, user_gr.users_group_name)
139 140 })
140 141
141 142 c.data = json.dumps(user_groups_data)
142 143 return render('admin/user_groups/user_groups.html')
143 144
144 145 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
145 146 @auth.CSRFRequired()
146 147 def create(self):
147 148 """POST /users_groups: Create a new item"""
148 149 # url('users_groups')
149 150
150 151 users_group_form = UserGroupForm()()
151 152 try:
152 153 form_result = users_group_form.to_python(dict(request.POST))
153 154 user_group = UserGroupModel().create(
154 155 name=form_result['users_group_name'],
155 156 description=form_result['user_group_description'],
156 157 owner=c.rhodecode_user.user_id,
157 158 active=form_result['users_group_active'])
158 159 Session().flush()
159 160
160 161 user_group_name = form_result['users_group_name']
161 162 action_logger(c.rhodecode_user,
162 163 'admin_created_users_group:%s' % user_group_name,
163 164 None, self.ip_addr, self.sa)
164 165 user_group_link = h.link_to(h.escape(user_group_name),
165 166 url('edit_users_group',
166 167 user_group_id=user_group.users_group_id))
167 168 h.flash(h.literal(_('Created user group %(user_group_link)s')
168 169 % {'user_group_link': user_group_link}),
169 170 category='success')
170 171 Session().commit()
171 172 except formencode.Invalid as errors:
172 173 return htmlfill.render(
173 174 render('admin/user_groups/user_group_add.html'),
174 175 defaults=errors.value,
175 176 errors=errors.error_dict or {},
176 177 prefix_error=False,
177 178 encoding="UTF-8",
178 179 force_defaults=False)
179 180 except Exception:
180 181 log.exception("Exception creating user group")
181 182 h.flash(_('Error occurred during creation of user group %s') \
182 183 % request.POST.get('users_group_name'), category='error')
183 184
184 return redirect(url('users_groups'))
185 return redirect(
186 url('edit_users_group', user_group_id=user_group.users_group_id))
185 187
186 188 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
187 189 def new(self):
188 190 """GET /user_groups/new: Form to create a new item"""
189 191 # url('new_users_group')
190 192 return render('admin/user_groups/user_group_add.html')
191 193
192 194 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
193 195 @auth.CSRFRequired()
194 196 def update(self, user_group_id):
195 197 """PUT /user_groups/user_group_id: Update an existing item"""
196 198 # Forms posted to this method should contain a hidden field:
197 199 # <input type="hidden" name="_method" value="PUT" />
198 200 # Or using helpers:
199 201 # h.form(url('users_group', user_group_id=ID),
200 202 # method='put')
201 203 # url('users_group', user_group_id=ID)
202 204
203 205 user_group_id = safe_int(user_group_id)
204 206 c.user_group = UserGroup.get_or_404(user_group_id)
205 207 c.active = 'settings'
206 208 self.__load_data(user_group_id)
207 209
208 210 available_members = [safe_unicode(x[0]) for x in c.available_members]
209 211
210 212 users_group_form = UserGroupForm(edit=True,
211 213 old_data=c.user_group.get_dict(),
212 214 available_members=available_members)()
213 215
214 216 try:
215 217 form_result = users_group_form.to_python(request.POST)
216 218 UserGroupModel().update(c.user_group, form_result)
217 219 gr = form_result['users_group_name']
218 220 action_logger(c.rhodecode_user,
219 221 'admin_updated_users_group:%s' % gr,
220 222 None, self.ip_addr, self.sa)
221 223 h.flash(_('Updated user group %s') % gr, category='success')
222 224 Session().commit()
223 225 except formencode.Invalid as errors:
224 226 defaults = errors.value
225 227 e = errors.error_dict or {}
226 228
227 229 return htmlfill.render(
228 230 render('admin/user_groups/user_group_edit.html'),
229 231 defaults=defaults,
230 232 errors=e,
231 233 prefix_error=False,
232 234 encoding="UTF-8",
233 235 force_defaults=False)
234 236 except Exception:
235 237 log.exception("Exception during update of user group")
236 238 h.flash(_('Error occurred during update of user group %s')
237 239 % request.POST.get('users_group_name'), category='error')
238 240
239 241 return redirect(url('edit_users_group', user_group_id=user_group_id))
240 242
241 243 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
242 244 @auth.CSRFRequired()
243 245 def delete(self, user_group_id):
244 246 """DELETE /user_groups/user_group_id: Delete an existing item"""
245 247 # Forms posted to this method should contain a hidden field:
246 248 # <input type="hidden" name="_method" value="DELETE" />
247 249 # Or using helpers:
248 250 # h.form(url('users_group', user_group_id=ID),
249 251 # method='delete')
250 252 # url('users_group', user_group_id=ID)
251 253 user_group_id = safe_int(user_group_id)
252 254 c.user_group = UserGroup.get_or_404(user_group_id)
253 255 force = str2bool(request.POST.get('force'))
254 256
255 257 try:
256 258 UserGroupModel().delete(c.user_group, force=force)
257 259 Session().commit()
258 260 h.flash(_('Successfully deleted user group'), category='success')
259 261 except UserGroupAssignedException as e:
260 262 h.flash(str(e), category='error')
261 263 except Exception:
262 264 log.exception("Exception during deletion of user group")
263 265 h.flash(_('An error occurred during deletion of user group'),
264 266 category='error')
265 267 return redirect(url('users_groups'))
266 268
267 269 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
268 270 def edit(self, user_group_id):
269 271 """GET /user_groups/user_group_id/edit: Form to edit an existing item"""
270 272 # url('edit_users_group', user_group_id=ID)
271 273
272 274 user_group_id = safe_int(user_group_id)
273 275 c.user_group = UserGroup.get_or_404(user_group_id)
274 276 c.active = 'settings'
275 277 self.__load_data(user_group_id)
276 278
277 279 defaults = self.__load_defaults(user_group_id)
278 280
279 281 return htmlfill.render(
280 282 render('admin/user_groups/user_group_edit.html'),
281 283 defaults=defaults,
282 284 encoding="UTF-8",
283 285 force_defaults=False
284 286 )
285 287
286 288 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
287 289 def edit_perms(self, user_group_id):
288 290 user_group_id = safe_int(user_group_id)
289 291 c.user_group = UserGroup.get_or_404(user_group_id)
290 292 c.active = 'perms'
291 293
292 294 defaults = {}
293 295 # fill user group users
294 296 for p in c.user_group.user_user_group_to_perm:
295 297 defaults.update({'u_perm_%s' % p.user.user_id:
296 298 p.permission.permission_name})
297 299
298 300 for p in c.user_group.user_group_user_group_to_perm:
299 301 defaults.update({'g_perm_%s' % p.user_group.users_group_id:
300 302 p.permission.permission_name})
301 303
302 304 return htmlfill.render(
303 305 render('admin/user_groups/user_group_edit.html'),
304 306 defaults=defaults,
305 307 encoding="UTF-8",
306 308 force_defaults=False
307 309 )
308 310
309 311 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
310 312 @auth.CSRFRequired()
311 313 def update_perms(self, user_group_id):
312 314 """
313 315 grant permission for given usergroup
314 316
315 317 :param user_group_id:
316 318 """
317 319 user_group_id = safe_int(user_group_id)
318 320 c.user_group = UserGroup.get_or_404(user_group_id)
319 321 form = UserGroupPermsForm()().to_python(request.POST)
320 322
321 323 if not c.rhodecode_user.is_admin:
322 324 if self._revoke_perms_on_yourself(form):
323 325 msg = _('Cannot change permission for yourself as admin')
324 326 h.flash(msg, category='warning')
325 327 return redirect(url('edit_user_group_perms', user_group_id=user_group_id))
326 328
327 329 try:
328 330 UserGroupModel().update_permissions(user_group_id,
329 331 form['perm_additions'], form['perm_updates'], form['perm_deletions'])
330 332 except RepoGroupAssignmentError:
331 333 h.flash(_('Target group cannot be the same'), category='error')
332 334 return redirect(url('edit_user_group_perms', user_group_id=user_group_id))
333 335 #TODO: implement this
334 336 #action_logger(c.rhodecode_user, 'admin_changed_repo_permissions',
335 337 # repo_name, self.ip_addr, self.sa)
336 338 Session().commit()
337 339 h.flash(_('User Group permissions updated'), category='success')
338 340 return redirect(url('edit_user_group_perms', user_group_id=user_group_id))
339 341
340 342 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
341 343 def edit_perms_summary(self, user_group_id):
342 344 user_group_id = safe_int(user_group_id)
343 345 c.user_group = UserGroup.get_or_404(user_group_id)
344 346 c.active = 'perms_summary'
345 347 permissions = {
346 348 'repositories': {},
347 349 'repositories_groups': {},
348 350 }
349 351 ugroup_repo_perms = UserGroupRepoToPerm.query()\
350 352 .options(joinedload(UserGroupRepoToPerm.permission))\
351 353 .options(joinedload(UserGroupRepoToPerm.repository))\
352 354 .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\
353 355 .all()
354 356
355 357 for gr in ugroup_repo_perms:
356 358 permissions['repositories'][gr.repository.repo_name] \
357 359 = gr.permission.permission_name
358 360
359 361 ugroup_group_perms = UserGroupRepoGroupToPerm.query()\
360 362 .options(joinedload(UserGroupRepoGroupToPerm.permission))\
361 363 .options(joinedload(UserGroupRepoGroupToPerm.group))\
362 364 .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\
363 365 .all()
364 366
365 367 for gr in ugroup_group_perms:
366 368 permissions['repositories_groups'][gr.group.group_name] \
367 369 = gr.permission.permission_name
368 370 c.permissions = permissions
369 371 return render('admin/user_groups/user_group_edit.html')
370 372
371 373 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
372 374 def edit_global_perms(self, user_group_id):
373 375 user_group_id = safe_int(user_group_id)
374 376 c.user_group = UserGroup.get_or_404(user_group_id)
375 377 c.active = 'global_perms'
376 378
377 379 c.default_user = User.get_default_user()
378 380 defaults = c.user_group.get_dict()
379 381 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
380 382 defaults.update(c.user_group.get_default_perms())
381 383
382 384 return htmlfill.render(
383 385 render('admin/user_groups/user_group_edit.html'),
384 386 defaults=defaults,
385 387 encoding="UTF-8",
386 388 force_defaults=False
387 389 )
388 390
389 391 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
390 392 @auth.CSRFRequired()
391 393 def update_global_perms(self, user_group_id):
392 394 """PUT /users_perm/user_group_id: Update an existing item"""
393 395 # url('users_group_perm', user_group_id=ID, method='put')
394 396 user_group_id = safe_int(user_group_id)
395 397 user_group = UserGroup.get_or_404(user_group_id)
396 398 c.active = 'global_perms'
397 399
398 400 try:
399 401 # first stage that verifies the checkbox
400 402 _form = UserIndividualPermissionsForm()
401 403 form_result = _form.to_python(dict(request.POST))
402 404 inherit_perms = form_result['inherit_default_permissions']
403 405 user_group.inherit_default_permissions = inherit_perms
404 406 Session().add(user_group)
405 407
406 408 if not inherit_perms:
407 409 # only update the individual permissions if the inherit flag is unchecked
408 410 _form = UserPermissionsForm(
409 411 [x[0] for x in c.repo_create_choices],
410 412 [x[0] for x in c.repo_create_on_write_choices],
411 413 [x[0] for x in c.repo_group_create_choices],
412 414 [x[0] for x in c.user_group_create_choices],
413 415 [x[0] for x in c.fork_choices],
414 416 [x[0] for x in c.inherit_default_permission_choices])()
415 417
416 418 form_result = _form.to_python(dict(request.POST))
417 419 form_result.update({'perm_user_group_id': user_group.users_group_id})
418 420
419 421 PermissionModel().update_user_group_permissions(form_result)
420 422
421 423 Session().commit()
422 424 h.flash(_('User Group global permissions updated successfully'),
423 425 category='success')
424 426
425 427 except formencode.Invalid as errors:
426 428 defaults = errors.value
427 429 c.user_group = user_group
428 430 return htmlfill.render(
429 431 render('admin/user_groups/user_group_edit.html'),
430 432 defaults=defaults,
431 433 errors=errors.error_dict or {},
432 434 prefix_error=False,
433 435 encoding="UTF-8",
434 436 force_defaults=False)
435 437
436 438 except Exception:
437 439 log.exception("Exception during permissions saving")
438 440 h.flash(_('An error occurred during permissions saving'),
439 441 category='error')
440 442
441 443 return redirect(url('edit_user_group_global_perms', user_group_id=user_group_id))
442 444
443 445 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
444 446 def edit_advanced(self, user_group_id):
445 447 user_group_id = safe_int(user_group_id)
446 448 c.user_group = UserGroup.get_or_404(user_group_id)
447 449 c.active = 'advanced'
448 450 c.group_members_obj = sorted(
449 451 (x.user for x in c.user_group.members),
450 452 key=lambda u: u.username.lower())
451 453
452 454 c.group_to_repos = sorted(
453 455 (x.repository for x in c.user_group.users_group_repo_to_perm),
454 456 key=lambda u: u.repo_name.lower())
455 457
456 458 c.group_to_repo_groups = sorted(
457 459 (x.group for x in c.user_group.users_group_repo_group_to_perm),
458 460 key=lambda u: u.group_name.lower())
459 461
460 462 return render('admin/user_groups/user_group_edit.html')
461 463
462 464 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
463 465 def edit_members(self, user_group_id):
464 466 user_group_id = safe_int(user_group_id)
465 467 c.user_group = UserGroup.get_or_404(user_group_id)
466 468 c.active = 'members'
467 469 c.group_members_obj = sorted((x.user for x in c.user_group.members),
468 470 key=lambda u: u.username.lower())
469 471
470 c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
472 group_members = [(x.user_id, x.username) for x in c.group_members_obj]
473
474 if request.is_xhr:
475 return jsonify(lambda *a, **k: {
476 'members': group_members
477 })
478
479 c.group_members = group_members
471 480 return render('admin/user_groups/user_group_edit.html')
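The notable change in `edit_members` above is the new XHR branch, which hands the member list back as JSON instead of re-rendering the whole edit page. A minimal sketch of how that could be exercised from a functional test; the `app` fixture (a webtest.TestApp) and the URL path are assumptions for illustration, not part of this changeset:

# Illustrative sketch only -- the route path and the `app` fixture are assumed.
def test_edit_members_xhr_returns_json(app):
    response = app.get(
        '/_admin/user_groups/1/edit/members',
        extra_environ={'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'})
    # The controller serializes members as (user_id, username) pairs.
    assert 'members' in response.json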
@@ -1,265 +1,267 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Compare controller for showing differences between two commits/refs/tags etc.
23 23 """
24 24
25 25 import logging
26 26
27 27 from webob.exc import HTTPBadRequest
28 28 from pylons import request, tmpl_context as c, url
29 29 from pylons.controllers.util import redirect
30 30 from pylons.i18n.translation import _
31 31
32 32 from rhodecode.controllers.utils import parse_path_ref, get_commit_from_ref_name
33 33 from rhodecode.lib import helpers as h
34 34 from rhodecode.lib import diffs
35 35 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
36 36 from rhodecode.lib.base import BaseRepoController, render
37 37 from rhodecode.lib.utils import safe_str
38 38 from rhodecode.lib.utils2 import safe_unicode, str2bool
39 39 from rhodecode.lib.vcs.exceptions import (
40 40 EmptyRepositoryError, RepositoryError, RepositoryRequirementError)
41 41 from rhodecode.model.db import Repository, ChangesetStatus
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 class CompareController(BaseRepoController):
47 47
48 48 def __before__(self):
49 49 super(CompareController, self).__before__()
50 50
51 51 def _get_commit_or_redirect(
52 52 self, ref, ref_type, repo, redirect_after=True, partial=False):
53 53 """
54 54 This is a safe way to get a commit. If an error occurs it
55 55 redirects to a commit with a proper message. If partial is set
56 56 then it does not redirect but raises an exception instead.
57 57 """
58 58 try:
59 59 return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
60 60 except EmptyRepositoryError:
61 61 if not redirect_after:
62 62 return repo.scm_instance().EMPTY_COMMIT
63 63 h.flash(h.literal(_('There are no commits yet')),
64 64 category='warning')
65 65 redirect(url('summary_home', repo_name=repo.repo_name))
66 66
67 67 except RepositoryError as e:
68 68 msg = safe_str(e)
69 69 log.exception(msg)
70 70 h.flash(msg, category='warning')
71 71 if not partial:
72 72 redirect(h.url('summary_home', repo_name=repo.repo_name))
73 73 raise HTTPBadRequest()
74 74
75 75 @LoginRequired()
76 76 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
77 77 'repository.admin')
78 78 def index(self, repo_name):
79 79 c.compare_home = True
80 80 c.commit_ranges = []
81 81 c.files = []
82 82 c.limited_diff = False
83 83 source_repo = c.rhodecode_db_repo.repo_name
84 84 target_repo = request.GET.get('target_repo', source_repo)
85 85 c.source_repo = Repository.get_by_repo_name(source_repo)
86 86 c.target_repo = Repository.get_by_repo_name(target_repo)
87 87 c.source_ref = c.target_ref = _('Select commit')
88 88 c.source_ref_type = ""
89 89 c.target_ref_type = ""
90 90 c.commit_statuses = ChangesetStatus.STATUSES
91 91 c.preview_mode = False
92 92 return render('compare/compare_diff.html')
93 93
94 94 @LoginRequired()
95 95 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
96 96 'repository.admin')
97 97 def compare(self, repo_name, source_ref_type, source_ref,
98 98 target_ref_type, target_ref):
99 99 # source_ref will be evaluated in source_repo
100 100 source_repo_name = c.rhodecode_db_repo.repo_name
101 101 source_path, source_id = parse_path_ref(source_ref)
102 102
103 103 # target_ref will be evaluated in target_repo
104 104 target_repo_name = request.GET.get('target_repo', source_repo_name)
105 105 target_path, target_id = parse_path_ref(target_ref)
106 106
107 107 c.commit_statuses = ChangesetStatus.STATUSES
108 108
109 109 # if merge is True
110 110 # Show the changes the source would receive, relative to the shared
111 111 # ancestor commit of target/source, if it were merged with target.
112 112 # Only commits which are in target but not in source will be shown.
113 113 merge = str2bool(request.GET.get('merge'))
114 114 # if merge is False
115 115 # Show a raw diff of source/target refs even if no ancestor exists
116 116
117 117
118 118 # c.fulldiff disables cut_off_limit
119 119 c.fulldiff = str2bool(request.GET.get('fulldiff'))
120 120
121 121 # if partial, returns just compare_commits.html (commits log)
122 122 partial = request.is_xhr
123 123
124 124 # swap url for compare_diff page
125 125 c.swap_url = h.url(
126 126 'compare_url',
127 127 repo_name=target_repo_name,
128 128 source_ref_type=target_ref_type,
129 129 source_ref=target_ref,
130 130 target_repo=source_repo_name,
131 131 target_ref_type=source_ref_type,
132 132 target_ref=source_ref,
133 133 merge=merge and '1' or '')
134 134
135 135 source_repo = Repository.get_by_repo_name(source_repo_name)
136 136 target_repo = Repository.get_by_repo_name(target_repo_name)
137 137
138 138 if source_repo is None:
139 139 msg = _('Could not find the original repo: %(repo)s') % {
140 140 'repo': source_repo_name}
141 141
142 142 log.error(msg)
143 143 h.flash(msg, category='error')
144 144 return redirect(url('compare_home', repo_name=c.repo_name))
145 145
146 146 if target_repo is None:
147 147 msg = _('Could not find the other repo: %(repo)s') % {
148 148 'repo': target_repo_name}
149 149 log.error(msg)
150 150 h.flash(msg, category='error')
151 151 return redirect(url('compare_home', repo_name=c.repo_name))
152 152
153 153 source_alias = source_repo.scm_instance().alias
154 154 target_alias = target_repo.scm_instance().alias
155 155 if source_alias != target_alias:
156 156 msg = _('The comparison of two different kinds of remote repos '
157 157 'is not available')
158 158 log.error(msg)
159 159 h.flash(msg, category='error')
160 160 return redirect(url('compare_home', repo_name=c.repo_name))
161 161
162 162 source_commit = self._get_commit_or_redirect(
163 163 ref=source_id, ref_type=source_ref_type, repo=source_repo,
164 164 partial=partial)
165 165 target_commit = self._get_commit_or_redirect(
166 166 ref=target_id, ref_type=target_ref_type, repo=target_repo,
167 167 partial=partial)
168 168
169 169 c.compare_home = False
170 170 c.source_repo = source_repo
171 171 c.target_repo = target_repo
172 172 c.source_ref = source_ref
173 173 c.target_ref = target_ref
174 174 c.source_ref_type = source_ref_type
175 175 c.target_ref_type = target_ref_type
176 176
177 177 source_scm = source_repo.scm_instance()
178 178 target_scm = target_repo.scm_instance()
179 179
180 180 pre_load = ["author", "branch", "date", "message"]
181 181 c.ancestor = None
182 182 try:
183 183 c.commit_ranges = source_scm.compare(
184 184 source_commit.raw_id, target_commit.raw_id,
185 185 target_scm, merge, pre_load=pre_load)
186 186 if merge:
187 187 c.ancestor = source_scm.get_common_ancestor(
188 188 source_commit.raw_id, target_commit.raw_id, target_scm)
189 189 except RepositoryRequirementError:
190 190 msg = _('Could not compare repos with different '
191 191 'large file settings')
192 192 log.error(msg)
193 193 if partial:
194 194 return msg
195 195 h.flash(msg, category='error')
196 196 return redirect(url('compare_home', repo_name=c.repo_name))
197 197
198 198 c.statuses = c.rhodecode_db_repo.statuses(
199 199 [x.raw_id for x in c.commit_ranges])
200 200
201 if partial:
201 if partial: # for PR ajax commits loader
202 if not c.ancestor:
203 return '' # cannot merge if there is no ancestor
202 204 return render('compare/compare_commits.html')
203 205
204 206 if c.ancestor:
205 207 # case we want a simple diff without incoming commits,
206 208 # previewing what will be merged.
207 209 # Make the diff on target repo (which is known to have target_ref)
208 210 log.debug('Using ancestor %s as source_ref instead of %s'
209 211 % (c.ancestor, source_ref))
210 212 source_repo = target_repo
211 213 source_commit = target_repo.get_commit(commit_id=c.ancestor)
212 214
213 215 # diff_limit will cut off the whole diff if the limit is applied
214 216 # otherwise it will just hide the big files from the front-end
215 217 diff_limit = self.cut_off_limit_diff
216 218 file_limit = self.cut_off_limit_file
217 219
218 220 log.debug('calculating diff between '
219 221 'source_ref:%s and target_ref:%s for repo `%s`',
220 222 source_commit, target_commit,
221 223 safe_unicode(source_repo.scm_instance().path))
222 224
223 225 if source_commit.repository != target_commit.repository:
224 226 msg = _(
225 227 "Repositories unrelated. "
226 228 "Cannot compare commit %(commit1)s from repository %(repo1)s "
227 229 "with commit %(commit2)s from repository %(repo2)s.") % {
228 230 'commit1': h.show_id(source_commit),
229 231 'repo1': source_repo.repo_name,
230 232 'commit2': h.show_id(target_commit),
231 233 'repo2': target_repo.repo_name,
232 234 }
233 235 h.flash(msg, category='error')
234 236 raise HTTPBadRequest()
235 237
236 238 txtdiff = source_repo.scm_instance().get_diff(
237 239 commit1=source_commit, commit2=target_commit,
238 240 path1=source_path, path=target_path)
239 241 diff_processor = diffs.DiffProcessor(
240 242 txtdiff, format='gitdiff', diff_limit=diff_limit,
241 243 file_limit=file_limit, show_full_diff=c.fulldiff)
242 244 _parsed = diff_processor.prepare()
243 245
244 246 c.limited_diff = False
245 247 if isinstance(_parsed, diffs.LimitedDiffContainer):
246 248 c.limited_diff = True
247 249
248 250 c.files = []
249 251 c.changes = {}
250 252 c.lines_added = 0
251 253 c.lines_deleted = 0
252 254 for f in _parsed:
253 255 st = f['stats']
254 256 if not st['binary']:
255 257 c.lines_added += st['added']
256 258 c.lines_deleted += st['deleted']
257 259 fid = h.FID('', f['filename'])
258 260 c.files.append([fid, f['operation'], f['filename'], f['stats'], f])
259 261 htmldiff = diff_processor.as_html(
260 262 enable_comments=False, parsed_lines=[f])
261 263 c.changes[fid] = [f['operation'], f['filename'], htmldiff, f]
262 264
263 265 c.preview_mode = merge
264 266
265 267 return render('compare/compare_diff.html')
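The behavioural change in `compare()` above is that a partial (XHR) request now returns an empty body when the two refs share no ancestor, so the pull-request commits loader can distinguish "nothing can be merged" from a rendered commit list. A simplified, hypothetical mirror of just that branch (the helper name is invented; this is not the controller itself):

# Hypothetical helper showing only the partial/ancestor decision sketched above.
def partial_commits_response(ancestor, render_fn):
    """Return the commits fragment for the AJAX loader, or '' when no ancestor exists."""
    if not ancestor:
        return ''  # cannot merge if there is no common ancestor
    return render_fn('compare/compare_commits.html')

# Example: render_fn would be rhodecode's `render`; here a stand-in lambda.
print(partial_commits_response(None, lambda tmpl: '<html/>'))      # ''
print(partial_commits_response('abc123', lambda tmpl: '<html/>'))  # '<html/>'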
@@ -1,232 +1,277 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Home controller for RhodeCode Enterprise
23 23 """
24 24
25 25 import logging
26 26 import time
27
27 import re
28 28
29 from pylons import tmpl_context as c, request
29 from pylons import tmpl_context as c, request, url, config
30 30 from pylons.i18n.translation import _
31 31 from sqlalchemy.sql import func
32 32
33 33 from rhodecode.lib.auth import (
34 LoginRequired, HasPermissionAllDecorator,
34 LoginRequired, HasPermissionAllDecorator, AuthUser,
35 35 HasRepoGroupPermissionAnyDecorator, XHRRequired)
36 36 from rhodecode.lib.base import BaseController, render
37 from rhodecode.lib.index import searcher_from_config
37 38 from rhodecode.lib.ext_json import json
38 39 from rhodecode.lib.utils import jsonify
39 40 from rhodecode.lib.utils2 import safe_unicode
40 41 from rhodecode.model.db import Repository, RepoGroup
41 42 from rhodecode.model.repo import RepoModel
42 43 from rhodecode.model.repo_group import RepoGroupModel
43 44 from rhodecode.model.scm import RepoList, RepoGroupList
44 45
45 46
46 47 log = logging.getLogger(__name__)
47 48
48 49
49 50 class HomeController(BaseController):
50 51 def __before__(self):
51 52 super(HomeController, self).__before__()
52 53
53 54 def ping(self):
54 55 """
55 56 Ping, doesn't require login, good for checking out the platform
56 57 """
57 58 instance_id = getattr(c, 'rhodecode_instanceid', '')
58 59 return 'pong[%s] => %s' % (instance_id, self.ip_addr,)
59 60
60 61 @LoginRequired()
61 62 @HasPermissionAllDecorator('hg.admin')
62 63 def error_test(self):
63 64 """
64 65 Test exception handling and emails on errors
65 66 """
66 67 class TestException(Exception):
67 68 pass
68 69
69 70 msg = ('RhodeCode Enterprise %s test exception. Generation time: %s'
70 71 % (c.rhodecode_name, time.time()))
71 72 raise TestException(msg)
72 73
73 74 def _get_groups_and_repos(self, repo_group_id=None):
74 75 # repo groups
75 76 repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id)
76 77 _perms = ['group.read', 'group.write', 'group.admin']
77 78 repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms)
78 79 repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
79 80 repo_group_list=repo_group_list_acl, admin=False)
80 81
81 82 # repositories
82 83 repo_list = Repository.get_all_repos(group_id=repo_group_id)
83 84 _perms = ['repository.read', 'repository.write', 'repository.admin']
84 85 repo_list_acl = RepoList(repo_list, perm_set=_perms)
85 86 repo_data = RepoModel().get_repos_as_dict(
86 87 repo_list=repo_list_acl, admin=False)
87 88
88 89 return repo_data, repo_group_data
89 90
90 91 @LoginRequired()
91 92 def index(self):
92 93 c.repo_group = None
93 94
94 95 repo_data, repo_group_data = self._get_groups_and_repos()
95 96 # json used to render the grids
96 97 c.repos_data = json.dumps(repo_data)
97 98 c.repo_groups_data = json.dumps(repo_group_data)
98 99
99 100 return render('/index.html')
100 101
101 102 @LoginRequired()
102 103 @HasRepoGroupPermissionAnyDecorator('group.read', 'group.write',
103 104 'group.admin')
104 105 def index_repo_group(self, group_name):
105 106 """GET /repo_group_name: Show a specific item"""
106 107 c.repo_group = RepoGroupModel()._get_repo_group(group_name)
107 108 repo_data, repo_group_data = self._get_groups_and_repos(
108 109 c.repo_group.group_id)
109 110
110 111 # json used to render the grids
111 112 c.repos_data = json.dumps(repo_data)
112 113 c.repo_groups_data = json.dumps(repo_group_data)
113 114
114 115 return render('index_repo_group.html')
115 116
116 117 def _get_repo_list(self, name_contains=None, repo_type=None, limit=20):
117 118 query = Repository.query()\
118 119 .order_by(func.length(Repository.repo_name))\
119 120 .order_by(Repository.repo_name)
120 121
121 122 if repo_type:
122 123 query = query.filter(Repository.repo_type == repo_type)
123 124
124 125 if name_contains:
125 126 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
126 127 query = query.filter(
127 128 Repository.repo_name.ilike(ilike_expression))
128 129 query = query.limit(limit)
129 130
130 131 all_repos = query.all()
131 132 repo_iter = self.scm_model.get_repos(all_repos)
132 133 return [
133 134 {
134 135 'id': obj['name'],
135 136 'text': obj['name'],
136 137 'type': 'repo',
137 'obj': obj['dbrepo']
138 'obj': obj['dbrepo'],
139 'url': url('summary_home', repo_name=obj['name'])
138 140 }
139 141 for obj in repo_iter]
140 142
141 143 def _get_repo_group_list(self, name_contains=None, limit=20):
142 144 query = RepoGroup.query()\
143 145 .order_by(func.length(RepoGroup.group_name))\
144 146 .order_by(RepoGroup.group_name)
145 147
146 148 if name_contains:
147 149 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
148 150 query = query.filter(
149 151 RepoGroup.group_name.ilike(ilike_expression))
150 152 query = query.limit(limit)
151 153
152 154 all_groups = query.all()
153 155 repo_groups_iter = self.scm_model.get_repo_groups(all_groups)
154 156 return [
155 157 {
156 158 'id': obj.group_name,
157 159 'text': obj.group_name,
158 160 'type': 'group',
159 'obj': {}
161 'obj': {},
162 'url': url('repo_group_home', group_name=obj.group_name)
160 163 }
161 164 for obj in repo_groups_iter]
162 165
166 def _get_hash_commit_list(self, hash_starts_with=None, limit=20):
167 if not hash_starts_with or len(hash_starts_with) < 3:
168 return []
169
170 commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with)
171
172 if len(commit_hashes) != 1:
173 return []
174
175 commit_hash_prefix = commit_hashes[0]
176
177 auth_user = AuthUser(
178 user_id=c.rhodecode_user.user_id, ip_addr=self.ip_addr)
179 searcher = searcher_from_config(config)
180 result = searcher.search(
181 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user)
182
183 return [
184 {
185 'id': entry['commit_id'],
186 'text': entry['commit_id'],
187 'type': 'commit',
188 'obj': {'repo': entry['repository']},
189 'url': url('changeset_home',
190 repo_name=entry['repository'], revision=entry['commit_id'])
191 }
192 for entry in result['results']]
193
163 194 @LoginRequired()
164 195 @XHRRequired()
165 196 @jsonify
166 def repo_switcher_data(self):
197 def goto_switcher_data(self):
167 198 query = request.GET.get('query')
168 log.debug('generating switcher repo/groups list, query %s', query)
199 log.debug('generating goto switcher list, query %s', query)
169 200
170 201 res = []
171 202 repo_groups = self._get_repo_group_list(query)
172 203 if repo_groups:
173 204 res.append({
174 205 'text': _('Groups'),
175 206 'children': repo_groups
176 207 })
177 208
178 209 repos = self._get_repo_list(query)
179 210 if repos:
180 211 res.append({
181 212 'text': _('Repositories'),
182 213 'children': repos
183 214 })
184 215
216 commits = self._get_hash_commit_list(query)
217 if commits:
218 unique_repos = {}
219 for commit in commits:
220 unique_repos.setdefault(commit['obj']['repo'], []
221 ).append(commit)
222
223 for repo in unique_repos:
224 res.append({
225 'text': _('Commits in %(repo)s') % {'repo': repo},
226 'children': unique_repos[repo]
227 })
228
185 229 data = {
186 230 'more': False,
187 231 'results': res
188 232 }
189 233 return data
190 234
191 235 @LoginRequired()
192 236 @XHRRequired()
193 237 @jsonify
194 238 def repo_list_data(self):
195 239 query = request.GET.get('query')
196 240 repo_type = request.GET.get('repo_type')
197 241 log.debug('generating repo list, query:%s', query)
198 242
199 243 res = []
200 244 repos = self._get_repo_list(query, repo_type=repo_type)
201 245 if repos:
202 246 res.append({
203 247 'text': _('Repositories'),
204 248 'children': repos
205 249 })
250
206 251 data = {
207 252 'more': False,
208 253 'results': res
209 254 }
210 255 return data
211 256
212 257 @LoginRequired()
213 258 @XHRRequired()
214 259 @jsonify
215 260 def user_autocomplete_data(self):
216 261 query = request.GET.get('query')
217 262
218 263 repo_model = RepoModel()
219 264 _users = repo_model.get_users(name_contains=query)
220 265
221 266 if request.GET.get('user_groups'):
222 267 # extend with user groups
223 268 _user_groups = repo_model.get_user_groups(name_contains=query)
224 269 _users = _users + _user_groups
225 270
226 271 return {'suggestions': _users}
227 272
228 273 @LoginRequired()
229 274 @XHRRequired()
230 275 @jsonify
231 276 def user_group_autocomplete_data(self):
232 277 return {'suggestions': []}
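For context on `goto_switcher_data` above: commit matches coming back from `_get_hash_commit_list` are grouped per repository before being appended to the result list. A self-contained illustration of that grouping step with invented sample data:

# Standalone illustration of the grouping done in goto_switcher_data(); data is made up.
commits = [
    {'id': 'deadbeef01', 'text': 'deadbeef01', 'type': 'commit', 'obj': {'repo': 'repo-a'}},
    {'id': 'deadbeef02', 'text': 'deadbeef02', 'type': 'commit', 'obj': {'repo': 'repo-b'}},
    {'id': 'deadbeef03', 'text': 'deadbeef03', 'type': 'commit', 'obj': {'repo': 'repo-a'}},
]

unique_repos = {}
for commit in commits:
    # one bucket per repository name
    unique_repos.setdefault(commit['obj']['repo'], []).append(commit)

res = [{'text': 'Commits in %s' % repo, 'children': children}
       for repo, children in unique_repos.items()]
# res now contains one "Commits in <repo>" group per repository, which matches
# the shape appended to the switcher response (modulo translation).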
(Remaining file diffs in this changeset were truncated in this view; one file was renamed from rhodecode/public/js/src/rhodecode/pyroutes.js to rhodecode/public/js/rhodecode/routes.js, and several files were removed.)