@@ -0,0 +1,256 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
"""
Celery loader, run with::

    celery worker --beat --app rhodecode.lib.celerylib.loader --loglevel DEBUG --ini=._dev/dev.ini
"""
import os
import logging

from celery import Celery
from celery import signals
from celery import Task
from kombu.serialization import register
from pyramid.threadlocal import get_current_request

import rhodecode

from rhodecode.lib.auth import AuthUser
from rhodecode.lib.celerylib.utils import get_ini_config, parse_ini_vars
from rhodecode.lib.ext_json import json
from rhodecode.lib.pyramid_utils import bootstrap, setup_logging, prepare_request
from rhodecode.lib.utils2 import str2bool
from rhodecode.model import meta


register('json_ext', json.dumps, json.loads,
         content_type='application/x-json-ext',
         content_encoding='utf-8')

log = logging.getLogger('celery.rhodecode.loader')


def add_preload_arguments(parser):
    parser.add_argument(
        '--ini', default=None,
        help='Path to ini configuration file.'
    )
    parser.add_argument(
        '--ini-var', default=None,
        help='Comma separated list of key=value to pass to ini.'
    )


def get_logger(obj):
    custom_log = logging.getLogger(
        'rhodecode.task.{}'.format(obj.__class__.__name__))

    if rhodecode.CELERY_ENABLED:
        try:
            custom_log = obj.get_logger()
        except Exception:
            pass

    return custom_log


base_celery_config = {
    'result_backend': 'rpc://',
    'result_expires': 60 * 60 * 24,
    'result_persistent': True,
    'imports': [],
    'worker_max_tasks_per_child': 100,
    'accept_content': ['json_ext'],
    'task_serializer': 'json_ext',
    'result_serializer': 'json_ext',
    'worker_hijack_root_logger': False,
}
# init main celery app
celery_app = Celery()
celery_app.user_options['preload'].add(add_preload_arguments)
ini_file_glob = None


@signals.setup_logging.connect
def setup_logging_callback(**kwargs):
    setup_logging(ini_file_glob)


@signals.user_preload_options.connect
def on_preload_parsed(options, **kwargs):
    ini_location = options['ini']
    ini_vars = options['ini_var']
    celery_app.conf['INI_PYRAMID'] = options['ini']

    if ini_location is None:
        print('You must provide the paste --ini argument')
        exit(-1)

    options = None
    if ini_vars is not None:
        options = parse_ini_vars(ini_vars)

    global ini_file_glob
    ini_file_glob = ini_location

    log.debug('Bootstrapping RhodeCode application...')
    env = bootstrap(ini_location, options=options)

    setup_celery_app(
        app=env['app'], root=env['root'], request=env['request'],
        registry=env['registry'], closer=env['closer'],
        ini_location=ini_location)

    # force the global flag on even if it's disabled via the .ini file,
    # because this is worker code that doesn't need it to be disabled.
    rhodecode.CELERY_ENABLED = True


@signals.task_success.connect
def task_success_signal(result, **kwargs):
    meta.Session.commit()
    celery_app.conf['PYRAMID_CLOSER']()


@signals.task_retry.connect
def task_retry_signal(
        request, reason, einfo, **kwargs):
    meta.Session.remove()
    celery_app.conf['PYRAMID_CLOSER']()


@signals.task_failure.connect
def task_failure_signal(
        task_id, exception, args, kwargs, traceback, einfo, **kargs):
    meta.Session.remove()
    celery_app.conf['PYRAMID_CLOSER']()


@signals.task_revoked.connect
def task_revoked_signal(
        request, terminated, signum, expired, **kwargs):
    celery_app.conf['PYRAMID_CLOSER']()


def setup_celery_app(app, root, request, registry, closer, ini_location):
    ini_dir = os.path.dirname(os.path.abspath(ini_location))
    celery_config = base_celery_config
    celery_config.update({
        # store celerybeat scheduler db where the .ini file is
        'beat_schedule_filename': os.path.join(ini_dir, 'celerybeat-schedule'),
    })
    ini_settings = get_ini_config(ini_location)
    log.debug('Got custom celery conf: %s', ini_settings)

    celery_config.update(ini_settings)
    celery_app.config_from_object(celery_config)

    celery_app.conf.update({'PYRAMID_APP': app})
    celery_app.conf.update({'PYRAMID_ROOT': root})
    celery_app.conf.update({'PYRAMID_REQUEST': request})
    celery_app.conf.update({'PYRAMID_REGISTRY': registry})
    celery_app.conf.update({'PYRAMID_CLOSER': closer})


def configure_celery(config, ini_location):
    """
    Helper that is called from our application creation logic. It gives
    connection info to the running webapp and allows execution of tasks from
    RhodeCode itself.
    """
    # store some globals into rhodecode
    rhodecode.CELERY_ENABLED = str2bool(
        config.registry.settings.get('use_celery'))
    if rhodecode.CELERY_ENABLED:
        log.info('Configuring celery based on `%s` file', ini_location)
        setup_celery_app(
            app=None, root=None, request=None, registry=config.registry,
            closer=None, ini_location=ini_location)


class RequestContextTask(Task):
    """
    A celery task which creates a rhodecode app instance context for the
    task, patches pyramid with the original request that created the task,
    and also adds the user to the context.
    """

    def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
                    link=None, link_error=None, shadow=None, **options):
        """ queue the job to run (we are in web request context here) """

        req = get_current_request()

        # web case
        if hasattr(req, 'user'):
            ip_addr = req.user.ip_addr
            user_id = req.user.user_id

        # api case
        elif hasattr(req, 'rpc_user'):
            ip_addr = req.rpc_user.ip_addr
            user_id = req.rpc_user.user_id
        else:
            raise Exception(
                'Unable to fetch required data from request: {}. \n'
                'This task is required to be executed from the context of '
                'a request in a webapp'.format(repr(req)))

        if req:
            # we hook into kwargs since it is the only way to pass our data to
            # the celery worker
            options['headers'] = options.get('headers', {})
            options['headers'].update({
                'rhodecode_proxy_data': {
                    'environ': {
                        'PATH_INFO': req.environ['PATH_INFO'],
                        'SCRIPT_NAME': req.environ['SCRIPT_NAME'],
                        'HTTP_HOST': req.environ.get('HTTP_HOST',
                                                     req.environ['SERVER_NAME']),
                        'SERVER_NAME': req.environ['SERVER_NAME'],
                        'SERVER_PORT': req.environ['SERVER_PORT'],
                        'wsgi.url_scheme': req.environ['wsgi.url_scheme'],
                    },
                    'auth_user': {
                        'ip_addr': ip_addr,
                        'user_id': user_id
                    },
                }
            })

        return super(RequestContextTask, self).apply_async(
            args, kwargs, task_id, producer, link, link_error, shadow, **options)

    def __call__(self, *args, **kwargs):
        """ rebuild the context and then run the task on the celery worker """

        proxy_data = getattr(self.request, 'rhodecode_proxy_data', None)
        if not proxy_data:
            return super(RequestContextTask, self).__call__(*args, **kwargs)

        log.debug('using celery proxy data to run task: %r', proxy_data)
        # re-inject and register threadlocals for proper routing support
        request = prepare_request(proxy_data['environ'])
        request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'],
                                ip_addr=proxy_data['auth_user']['ip_addr'])

        return super(RequestContextTask, self).__call__(*args, **kwargs)
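To make the round trip above concrete, here is a minimal sketch (the task name
and body are illustrative, not part of this change) of how a task opts into
this behaviour: apply_async() runs in the web process and packs the flattened
environ plus user info into the rhodecode_proxy_data message header, and
__call__() runs on the worker, where prepare_request() re-registers the
Pyramid threadlocals before the task body executes.

    from rhodecode.lib.celerylib.loader import celery_app, RequestContextTask

    @celery_app.task(base=RequestContextTask, name='example.ping')
    def ping(message):
        # by the time this body runs on the worker, get_current_request()
        # returns a request rebuilt from the proxied environ, with .user set
        return 'pong: {}'.format(message)

    # queue it from inside a web request, so get_current_request() carries
    # either `user` or `rpc_user` (otherwise apply_async() raises):
    # ping.apply_async(args=('hello',))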
@@ -0,0 +1,156 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import json
import logging
import datetime

from functools import partial

from pyramid.compat import configparser
from celery.result import AsyncResult
import celery.loaders.base
import celery.schedules


log = logging.getLogger(__name__)


def get_task_id(task):
    task_id = None
    if isinstance(task, AsyncResult):
        task_id = task.task_id

    return task_id


def crontab(value):
    return celery.schedules.crontab(**value)


def timedelta(value):
    return datetime.timedelta(**value)


def safe_json(get, section, key):
    value = ''
    try:
        value = get(key)
        json_value = json.loads(value)
    except ValueError:
        msg = 'The %s=%s is not valid JSON in section %s' % (
            key, value, section
        )
        raise ValueError(msg)

    return json_value


def get_beat_config(parser, section):
    SCHEDULE_TYPE_MAP = {
        'crontab': crontab,
        'timedelta': timedelta,
        'integer': int
    }
    get = partial(parser.get, section)
    has_option = partial(parser.has_option, section)

    schedule_type = get('type')
    schedule_value = safe_json(get, section, 'schedule')

    scheduler_cls = SCHEDULE_TYPE_MAP.get(schedule_type)

    if scheduler_cls is None:
        raise ValueError(
            'schedule type %s in section %s is invalid' % (
                schedule_type,
                section
            )
        )

    schedule = scheduler_cls(schedule_value)

    config = {
        'task': get('task'),
        'schedule': schedule,
    }

    if has_option('args'):
        config['args'] = safe_json(get, section, 'args')

    if has_option('kwargs'):
        config['kwargs'] = safe_json(get, section, 'kwargs')

    return config


def get_ini_config(ini_location):
    """
    Converts basic ini configuration into celery 4.X options
    """
    def key_converter(key_name):
        pref = 'celery.'
        if key_name.startswith(pref):
            return key_name[len(pref):].replace('.', '_').lower()

    def type_converter(parsed_key, value):
        # cast to int
        if value.isdigit():
            return int(value)

        # cast to bool
        if value.lower() in ['true', 'false']:
            return value.lower() == 'true'
        return value

    parser = configparser.SafeConfigParser(
        defaults={'here': os.path.abspath(ini_location)})
    parser.read(ini_location)

    ini_config = {}
    for k, v in parser.items('app:main'):
        pref = 'celery.'
        if k.startswith(pref):
            ini_config[key_converter(k)] = type_converter(key_converter(k), v)

    beat_config = {}
    for section in parser.sections():
        if section.startswith('celerybeat:'):
            name = section.split(':', 1)[1]
            beat_config[name] = get_beat_config(parser, section)

    # final compose of settings
    celery_settings = {}

    if ini_config:
        celery_settings.update(ini_config)
    if beat_config:
        celery_settings.update({'beat_schedule': beat_config})

    return celery_settings


def parse_ini_vars(ini_vars):
    options = {}
    for pairs in ini_vars.split(','):
        key, value = pairs.split('=')
        options[key] = value
    return options
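As a worked example of the converter above (the section name, schedule, and
task path are hypothetical, chosen only to illustrate the mapping): keys with
the `celery.` prefix are lowercased with dots turned into underscores and
digit/boolean values cast, while each `celerybeat:` section becomes one
`beat_schedule` entry.

    import datetime
    import tempfile

    from rhodecode.lib.celerylib.utils import get_ini_config, parse_ini_vars

    ini_text = """
    [app:main]
    celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
    celery.max_tasks_per_child = 100

    [celerybeat:check_repos]
    type = timedelta
    schedule = {"hours": 24}
    task = rhodecode.lib.celerylib.tasks.check_repos
    """

    # write the snippet to disk so get_ini_config() can parse it
    with tempfile.NamedTemporaryFile('w', suffix='.ini', delete=False) as f:
        f.write(ini_text.replace('\n    ', '\n'))

    settings = get_ini_config(f.name)
    assert settings['broker_url'] == (
        'amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost')
    assert settings['max_tasks_per_child'] == 100
    assert (settings['beat_schedule']['check_repos']['schedule'] ==
            datetime.timedelta(hours=24))

    # --ini-var values use the same flat `key=value,key2=value2` format:
    assert parse_ini_vars('vcs.start_server=false,use_celery=true') == {
        'vcs.start_server': 'false', 'use_celery': 'true'}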
@@ -1,725 +1,717 @@


################################################################################
##                 RHODECODE COMMUNITY EDITION CONFIGURATION                  ##
# The %(here)s variable will be replaced with the parent directory of this file#
################################################################################

[DEFAULT]
debug = true

################################################################################
##                           EMAIL CONFIGURATION                              ##
## Uncomment and replace with the email address which should receive         ##
## any error reports after an application crash                              ##
## Additionally these settings will be used by the RhodeCode mailing system  ##
################################################################################

## prefix all email subjects with the given prefix, helps filtering out emails
#email_prefix = [RhodeCode]

## email FROM address all mails will be sent from
#app_email_from = rhodecode-noreply@localhost

## Uncomment and replace with the address which should receive any error report
## note: using appenlight for error handling doesn't need this to be uncommented
#email_to = admin@localhost

## in case of Application errors, send an error email from
#error_email_from = rhodecode_error@localhost

## additional error message to be sent in case of server crash
#error_message =


#smtp_server = mail.server.com
#smtp_username =
#smtp_password =
#smtp_port =
#smtp_use_tls = false
#smtp_use_ssl = true
## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
#smtp_auth =

[server:main]
## COMMON ##
host = 127.0.0.1
port = 5000

##################################
##    WAITRESS WSGI SERVER     ##
## Recommended for Development ##
##################################

use = egg:waitress#main
## number of worker threads
threads = 5
## MAX BODY SIZE 100GB
max_request_body_size = 107374182400
## Use poll instead of select, fixes file descriptor limit problems.
## May not work on old windows systems.
asyncore_use_poll = true


##########################
## GUNICORN WSGI SERVER ##
##########################
## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini

#use = egg:gunicorn#main
## Sets the number of process workers. You must set `instance_id = *`
## when this option is set to more than one worker, recommended
## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
## The `instance_id = *` must be set in the [app:main] section below
#workers = 2
## number of threads for each worker, must be set to 1 for gevent
## generally recommended to be at 1
#threads = 1
## process name
#proc_name = rhodecode
## type of worker class, one of sync, gevent
## for bigger setups, using a worker class other than sync is recommended
#worker_class = sync
## The maximum number of simultaneous clients. Valid only for Gevent
#worker_connections = 10
## max number of requests that worker will handle before being gracefully
## restarted, could prevent memory leaks
#max_requests = 1000
#max_requests_jitter = 30
## amount of time a worker can spend handling a request before it
## gets killed and restarted. Set to 6hrs
#timeout = 21600


## prefix middleware for RhodeCode.
## recommended when using proxy setup.
## allows serving RhodeCode under a prefix on the server.
## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
## And set your prefix like: `prefix = /custom_prefix`
## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
## to make your cookies only work on prefix url
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
prefix = /

[app:main]
use = egg:rhodecode-enterprise-ce

## enable proxy prefix middleware, defined above
#filter-with = proxy-prefix

# During development we want to have the debug toolbar enabled
pyramid.includes =
    pyramid_debugtoolbar
    rhodecode.lib.middleware.request_wrapper

pyramid.reload_templates = true

debugtoolbar.hosts = 0.0.0.0/0
debugtoolbar.exclude_prefixes =
    /css
    /fonts
    /images
    /js

## RHODECODE PLUGINS ##
rhodecode.includes =
    rhodecode.api


# api prefix url
rhodecode.api.url = /_admin/api


## END RHODECODE PLUGINS ##

## encryption key used to encrypt social plugin tokens,
## remote_urls with credentials etc, if not set it defaults to
## `beaker.session.secret`
#rhodecode.encrypted_values.secret =

## decryption strict mode (enabled by default). It controls if decryption raises
## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
#rhodecode.encrypted_values.strict = false

## return gzipped responses from RhodeCode (static files/application)
gzip_responses = false

## autogenerate javascript routes file on startup
generate_js_files = false

## Optional Languages
## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
lang = en

## perform a full repository scan on each server start, this should be
## set to false after first startup, to allow faster server restarts.
startup.import_repos = false

## Uncomment and set this path to use archive download cache.
## Once enabled, generated archives will be cached at this location
## and served from the cache during subsequent requests for the same archive of
## the repository.
#archive_cache_dir = /tmp/tarballcache

## URL at which the application is running. This is used for bootstrapping
## requests in context when no web request is available. Used in ishell, or
## SSH calls. Set this for events to receive proper url for SSH calls.
app.base_url = http://rhodecode.local

## change this to unique ID for security
app_instance_uuid = rc-production

## cut off limit for large diffs (size in bytes). If overall diff size on
## commit, or pull request exceeds this limit this diff will be displayed
## partially. E.g 512000 == 512Kb
cut_off_limit_diff = 512000

## cut off limit for large files inside diffs (size in bytes). Each individual
## file inside diff which exceeds this limit will be displayed partially.
## E.g 128000 == 128Kb
cut_off_limit_file = 128000

## use cached version of scm repo everywhere
vcs_full_cache = true

## force https in RhodeCode, fixes https redirects, assumes it's always https
## Normally this is controlled by proper http flags sent from http server
force_https = false

## use Strict-Transport-Security headers
use_htsts = false

## number of commits stats will parse on each iteration
commit_parse_limit = 25

## git rev filter option, --all is the default filter, if you need to
## hide all refs in changelog switch this to --branches --tags
git_rev_filter = --branches --tags

# Set to true if your repos are exposed using the dumb protocol
git_update_server_info = false

## RSS/ATOM feed options
rss_cut_off_limit = 256000
rss_items_per_page = 10
rss_include_diff = false

## gist URL alias, used to create nicer urls for gist. This should be an
## url that does rewrites to _admin/gists/{gistid}.
## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
gist_alias_url =

## List of views (using glob pattern syntax) that AUTH TOKENS could be
## used for access.
## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
## came from the logged-in user who owns this authentication token.
## Additionally, the @TOKEN syntax can be used to bind the view to a specific
## authentication token. Such a view would only be accessible when used together
## with this authentication token
##
## list of all views can be found under `/_admin/permissions/auth_token_access`
## The list should be "," separated and on a single line.
##
## Most common views to enable:
#    RepoCommitsView:repo_commit_download
#    RepoCommitsView:repo_commit_patch
#    RepoCommitsView:repo_commit_raw
#    RepoCommitsView:repo_commit_raw@TOKEN
#    RepoFilesView:repo_files_diff
#    RepoFilesView:repo_archivefile
#    RepoFilesView:repo_file_raw
#    GistView:*
api_access_controllers_whitelist =

## default encoding used to convert from and to unicode
## can also be a comma separated list of encodings in case of mixed encodings
default_encoding = UTF-8

## instance-id prefix
## a prefix key for this instance used for cache invalidation when running
## multiple instances of rhodecode, make sure it's globally unique for
## all running rhodecode instances. Leave empty if you don't use it
instance_id =

## Fallback authentication plugin. Set this to a plugin ID to force the usage
## of an authentication plugin also if it is disabled by its settings.
## This could be useful if you are unable to log in to the system due to broken
## authentication settings. Then you can enable e.g. the internal rhodecode auth
## module to log in again and fix the settings.
##
## Available builtin plugin IDs (hash is part of the ID):
## egg:rhodecode-enterprise-ce#rhodecode
## egg:rhodecode-enterprise-ce#pam
## egg:rhodecode-enterprise-ce#ldap
## egg:rhodecode-enterprise-ce#jasig_cas
## egg:rhodecode-enterprise-ce#headers
## egg:rhodecode-enterprise-ce#crowd
#rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode

## alternative return HTTP header for failed authentication. Default HTTP
## response is 401 HTTPUnauthorized. Currently HG clients have trouble
## handling that, causing a series of failed authentication calls.
## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
## This will be served instead of default 401 on bad authentication
auth_ret_code =

## use special detection method when serving auth_ret_code, instead of serving
## ret_code directly, use 401 initially (which triggers a credentials prompt)
## and then serve auth_ret_code to clients
auth_ret_code_detection = false

## locking return code. When repository is locked return this HTTP code. 2XX
## codes don't break the transactions while 4XX codes do
lock_ret_code = 423

## allows changing the repository location on the settings page
allow_repo_location_change = true

## allows setting up custom hooks on the settings page
allow_custom_hooks_settings = true

## generated license token, go to the license page in RhodeCode settings to
## obtain a new token
license_token =

## supervisor connection uri, for managing supervisor and logs.
supervisor.uri =
## supervisord group name/id that we only want this RC instance to handle
supervisor.group_id = dev

## Display extended labs settings
labs_settings_active = true

####################################
###        CELERY CONFIG        ####
####################################
use_celery = false
-broker.host = localhost
-broker.vhost = rabbitmqhost
-broker.port = 5672
-broker.user = rabbitmq
-broker.password = qweqwe
-
-celery.imports = rhodecode.lib.celerylib.tasks

-celery.result.backend = amqp
-celery.result.dburi = amqp://
-celery.result.serialier = json
+# connection url to the message broker (default rabbitmq)
+celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost

-#celery.send.task.error.emails = true
-#celery.amqp.task.result.expires = 18000
-
-celeryd.concurrency = 2
-#celeryd.log.file = celeryd.log
-celeryd.log.level = debug
-celeryd.max.tasks.per.child = 1
+# maximum tasks to execute before worker restart
+celery.max_tasks_per_child = 100

## tasks will never be sent to the queue, but executed locally instead.
-celery.always.eager = false
+celery.task_always_eager = false

####################################
###         BEAKER CACHE        ####
####################################
# default cache dir for templates. Putting this into a ramdisk
## can boost performance, eg. %(here)s/data_ramdisk
cache_dir = %(here)s/data

## locking and default file storage for Beaker. Putting this into a ramdisk
## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
beaker.cache.data_dir = %(here)s/data/cache/beaker_data
beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock

beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long

beaker.cache.super_short_term.type = memory
beaker.cache.super_short_term.expire = 10
beaker.cache.super_short_term.key_length = 256

beaker.cache.short_term.type = memory
beaker.cache.short_term.expire = 60
beaker.cache.short_term.key_length = 256

beaker.cache.long_term.type = memory
beaker.cache.long_term.expire = 36000
beaker.cache.long_term.key_length = 256

beaker.cache.sql_cache_short.type = memory
beaker.cache.sql_cache_short.expire = 10
beaker.cache.sql_cache_short.key_length = 256

## default is memory cache, configure only if required
## using multi-node or multi-worker setup
#beaker.cache.auth_plugins.type = ext:database
#beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
#beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.cache.auth_plugins.sa.pool_recycle = 3600
#beaker.cache.auth_plugins.sa.pool_size = 10
#beaker.cache.auth_plugins.sa.max_overflow = 0

beaker.cache.repo_cache_long.type = memorylru_base
beaker.cache.repo_cache_long.max_items = 4096
beaker.cache.repo_cache_long.expire = 2592000

## default is memorylru_base cache, configure only if required
## using multi-node or multi-worker setup
#beaker.cache.repo_cache_long.type = ext:memcached
#beaker.cache.repo_cache_long.url = localhost:11211
#beaker.cache.repo_cache_long.expire = 1209600
#beaker.cache.repo_cache_long.key_length = 256

####################################
###       BEAKER SESSION        ####
####################################

## .session.type is type of storage options for the session, current allowed
## types are file, ext:memcached, ext:database, and memory (default).
beaker.session.type = file
beaker.session.data_dir = %(here)s/data/sessions/data

## db based session, fast, and allows easy management over logged in users
#beaker.session.type = ext:database
#beaker.session.table_name = db_session
#beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.session.sa.pool_recycle = 3600
#beaker.session.sa.echo = false

beaker.session.key = rhodecode
beaker.session.secret = develop-rc-uytcxaz
beaker.session.lock_dir = %(here)s/data/sessions/lock

## Secure encrypted cookie. Requires AES and AES python libraries
## you must disable beaker.session.secret to use this
#beaker.session.encrypt_key = key_for_encryption
#beaker.session.validate_key = validation_key

## sets session as invalid (also logging out the user) if it has not been
## accessed for the given amount of time in seconds
beaker.session.timeout = 2592000
beaker.session.httponly = true
## Path to use for the cookie. Set to prefix if you use prefix middleware
#beaker.session.cookie_path = /custom_prefix

## uncomment for https secure cookie
beaker.session.secure = false

## auto save the session so you don't have to call .save()
beaker.session.auto = false

## default cookie expiration time in seconds, set to `true` to set expire
## at browser close
#beaker.session.cookie_expires = 3600

###################################
## SEARCH INDEXING CONFIGURATION ##
###################################
## Full text search indexer is available in rhodecode-tools under
## `rhodecode-tools index` command

## WHOOSH Backend, doesn't require additional services to run
## it works well with a few dozen repos
search.module = rhodecode.lib.index.whoosh
search.location = %(here)s/data/index

########################################
###    CHANNELSTREAM CONFIG         ####
########################################
## channelstream enables persistent connections and live notifications
## in the system. It's also used by the chat system
channelstream.enabled = false

## server address for channelstream server on the backend
channelstream.server = 127.0.0.1:9800

## location of the channelstream server from outside world
## use ws:// for http or wss:// for https. This address needs to be handled
## by external HTTP server such as Nginx or Apache
## see nginx/apache configuration examples in our docs
channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
channelstream.secret = secret
channelstream.history.location = %(here)s/channelstream_history

## Internal application path that Javascript uses to connect into.
## If you use proxy-prefix the prefix should be added before /_channelstream
channelstream.proxy_path = /_channelstream


###################################
##       APPENLIGHT CONFIG       ##
###################################

## Appenlight is tailored to work with RhodeCode, see
## http://appenlight.com for details on how to obtain an account

## appenlight integration enabled
appenlight = false

appenlight.server_url = https://api.appenlight.com
appenlight.api_key = YOUR_API_KEY
#appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5

# used for JS client
appenlight.api_public_key = YOUR_API_PUBLIC_KEY

## TWEAK AMOUNT OF INFO SENT HERE

## enables 404 error logging (default False)
appenlight.report_404 = false

## time in seconds after which a request is considered slow (default 1)
appenlight.slow_request_time = 1

## record slow requests in application
## (needs to be enabled for slow datastore recording and time tracking)
appenlight.slow_requests = true

## enable hooking to application loggers
appenlight.logging = true

## minimum log level for log capture
appenlight.logging.level = WARNING

## send logs only from erroneous/slow requests
## (saves API quota for intensive logging)
appenlight.logging_on_error = false

## list of additional keywords that should be grabbed from the environ object
## can be a string with a comma separated list of words in lowercase
## (by default the client will always send the following info:
## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
## start with HTTP*); this list can be extended with additional keywords here
appenlight.environ_keys_whitelist =

## list of keywords that should be blanked from the request object
## can be a string with a comma separated list of words in lowercase
## (by default the client will always blank keys that contain the following words
## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf');
## this list can be extended with additional keywords set here
appenlight.request_keys_blacklist =

## list of namespaces that should be ignored when gathering log entries
## can be a string with a comma separated list of namespaces
## (by default the client ignores its own entries: appenlight_client.client)
appenlight.log_namespace_blacklist =


################################################################################
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT*  ##
## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
## execute malicious code after an exception is raised.                       ##
################################################################################
#set debug = false


##############
##  STYLING ##
##############
debug_style = true

###########################################
###   MAIN RHODECODE DATABASE CONFIG   ###
###########################################
#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30

# see sqlalchemy docs for other advanced settings

## print the sql statements to output
sqlalchemy.db1.echo = false
## recycle the connections after this amount of seconds
sqlalchemy.db1.pool_recycle = 3600
sqlalchemy.db1.convert_unicode = true

## the number of connections to keep open inside the connection pool.
## 0 indicates no limit
#sqlalchemy.db1.pool_size = 5

## the number of connections to allow in connection pool "overflow", that is
## connections that can be opened above and beyond the pool_size setting,
## which defaults to five.
#sqlalchemy.db1.max_overflow = 10


##################
### VCS CONFIG ###
##################
vcs.server.enable = true
vcs.server = localhost:9900

## Web server connectivity protocol, responsible for web based VCS operations
## Available protocols are:
## `http` - use http-rpc backend (default)
vcs.server.protocol = http

## Push/Pull operations protocol, available options are:
## `http` - use http-rpc backend (default)
##
vcs.scm_app_implementation = http

## Push/Pull operations hooks protocol, available options are:
## `http` - use http-rpc backend (default)
vcs.hooks.protocol = http

vcs.server.log_level = debug
## Start VCSServer with this instance as a subprocess, useful for development
vcs.start_server = true

## List of enabled VCS backends, available options are:
## `hg`  - mercurial
## `git` - git
## `svn` - subversion
vcs.backends = hg, git, svn

vcs.connection_timeout = 3600
## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
#vcs.svn.compatible_version = pre-1.8-compatible


############################################################
### Subversion proxy support (mod_dav_svn)               ###
### Maps RhodeCode repo groups into SVN paths for Apache ###
############################################################
## Enable or disable the config file generation.
svn.proxy.generate_config = false
## Generate config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true
## Set location and file name of generated config file.
svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
## alternative mod_dav config template. This needs to be a mako template
#svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
## Used as a prefix to the `Location` block in the generated config file.
## In most cases it should be set to `/`.
svn.proxy.location_root = /
## Command to reload the mod dav svn configuration on change.
## Example: `/etc/init.d/apache2 reload`
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload
## If the timeout expires before the reload command finishes, the command will
## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
#svn.proxy.reload_timeout = 10

############################################################
### SSH Support Settings                                 ###
############################################################

## Defines if a custom authorized_keys file should be created and written on
## any change of user ssh keys. Setting this to false also disables the
## possibility of users adding SSH keys from the web interface. Super admins
## can still manage SSH Keys.
ssh.generate_authorized_keyfile = false

## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
# ssh.authorized_keys_ssh_opts =

## Path to the authorized_keys file where the generated entries are placed.
## It is possible to have multiple key files specified in `sshd_config` e.g.
## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode

## Command to execute the SSH wrapper. The binary is available in the
## rhodecode installation directory.
## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper

## Allow shell when executing the ssh-wrapper command
ssh.wrapper_cmd_allow_shell = false

## Enables logging, and detailed output sent back to the client during SSH
## operations. Useful for debugging, shouldn't be used in production.
ssh.enable_debug_logging = true

## Paths to binary executables; by default they are just the names, but we can
## override them if we want to use a custom one
ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve


## Dummy marker to add new entries after.
## Add any custom entries below. Please don't remove.
custom.conf = 1


################################
### LOGGING CONFIGURATION   ####
################################
[loggers]
-keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper
+keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery

[handlers]
keys = console, console_sql

[formatters]
keys = generic, color_formatter, color_formatter_sql

#############
## LOGGERS ##
#############
[logger_root]
level = NOTSET
handlers = console

[logger_sqlalchemy]
level = INFO
handlers = console_sql
qualname = sqlalchemy.engine
propagate = 0

[logger_beaker]
level = DEBUG
handlers =
qualname = beaker.container
propagate = 1

[logger_rhodecode]
level = DEBUG
handlers =
qualname = rhodecode
propagate = 1

[logger_ssh_wrapper]
level = DEBUG
handlers =
qualname = ssh_wrapper
propagate = 1

+[logger_celery]
+level = DEBUG
+handlers =
+qualname = celery
+

##############
## HANDLERS ##
##############

[handler_console]
class = StreamHandler
args = (sys.stderr, )
level = DEBUG
formatter = color_formatter

[handler_console_sql]
class = StreamHandler
args = (sys.stderr, )
level = DEBUG
formatter = color_formatter_sql

################
## FORMATTERS ##
################

[formatter_generic]
class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter]
class = rhodecode.lib.logging_formatter.ColorFormatter
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter_sql]
class = rhodecode.lib.logging_formatter.ColorFormatterSql
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S
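The config change above collapses the old multi-key broker setup into Celery
4's single URL form. A small sanity sketch of the mapping (pure Python, values
copied verbatim from the removed keys):

    # amqp://<user>:<password>@<host>:<port>/<vhost>
    old = {'user': 'rabbitmq', 'password': 'qweqwe',
           'host': 'localhost', 'port': '5672', 'vhost': 'rabbitmqhost'}
    broker_url = 'amqp://{user}:{password}@{host}:{port}/{vhost}'.format(**old)
    assert broker_url == 'amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost'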
@@ -1,695 +1,687 b'' | |||
|
1 | 1 | |
|
2 | 2 | |
|
3 | 3 | ################################################################################ |
|
4 | 4 | ## RHODECODE COMMUNITY EDITION CONFIGURATION ## |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | 10 | |
|
11 | 11 | ################################################################################ |
|
12 | 12 | ## EMAIL CONFIGURATION ## |
|
13 | 13 | ## Uncomment and replace with the email address which should receive ## |
|
14 | 14 | ## any error reports after an application crash ## |
|
15 | 15 | ## Additionally these settings will be used by the RhodeCode mailing system ## |
|
16 | 16 | ################################################################################ |
|
17 | 17 | |
|
18 | 18 | ## prefix all emails subjects with given prefix, helps filtering out emails |
|
19 | 19 | #email_prefix = [RhodeCode] |
|
20 | 20 | |
|
21 | 21 | ## email FROM address all mails will be sent |
|
22 | 22 | #app_email_from = rhodecode-noreply@localhost |
|
23 | 23 | |
|
24 | 24 | ## Uncomment and replace with the address which should receive any error report |
|
25 | 25 | ## note: using appenlight for error handling doesn't need this to be uncommented |
|
26 | 26 | #email_to = admin@localhost |
|
27 | 27 | |
|
28 | 28 | ## in case of Application errors, sent an error email form |
|
29 | 29 | #error_email_from = rhodecode_error@localhost |
|
30 | 30 | |
|
31 | 31 | ## additional error message to be send in case of server crash |
|
32 | 32 | #error_message = |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | #smtp_server = mail.server.com |
|
36 | 36 | #smtp_username = |
|
37 | 37 | #smtp_password = |
|
38 | 38 | #smtp_port = |
|
39 | 39 | #smtp_use_tls = false |
|
40 | 40 | #smtp_use_ssl = true |
|
41 | 41 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
42 | 42 | #smtp_auth = |
|
43 | 43 | |
|
44 | 44 | [server:main] |
|
45 | 45 | ## COMMON ## |
|
46 | 46 | host = 127.0.0.1 |
|
47 | 47 | port = 5000 |
|
48 | 48 | |
|
49 | 49 | ################################## |
|
50 | 50 | ## WAITRESS WSGI SERVER ## |
|
51 | 51 | ## Recommended for Development ## |
|
52 | 52 | ################################## |
|
53 | 53 | |
|
54 | 54 | #use = egg:waitress#main |
|
55 | 55 | ## number of worker threads |
|
56 | 56 | #threads = 5 |
|
57 | 57 | ## MAX BODY SIZE 100GB |
|
58 | 58 | #max_request_body_size = 107374182400 |
|
59 | 59 | ## Use poll instead of select, fixes file descriptors limits problems. |
|
60 | 60 | ## May not work on old windows systems. |
|
61 | 61 | #asyncore_use_poll = true |
|
62 | 62 | |
|
63 | 63 | |
|
64 | 64 | ########################## |
|
65 | 65 | ## GUNICORN WSGI SERVER ## |
|
66 | 66 | ########################## |
|
67 | 67 | ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini |
|
68 | 68 | |
|
69 | 69 | use = egg:gunicorn#main |
|
70 | 70 | ## Sets the number of process workers. You must set `instance_id = *` |
|
71 | 71 | ## when this option is set to more than one worker, recommended |
|
72 | 72 | ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers |
|
73 | 73 | ## The `instance_id = *` must be set in the [app:main] section below |
|
74 | 74 | workers = 2 |
|
75 | 75 | ## number of threads for each of the worker, must be set to 1 for gevent |
|
76 | 76 | ## generally recommended to be at 1 |
|
77 | 77 | #threads = 1 |
|
78 | 78 | ## process name |
|
79 | 79 | proc_name = rhodecode |
|
80 | 80 | ## type of worker class, one of sync, gevent |
|
81 | 81 | ## recommended for bigger setup is using of of other than sync one |
|
82 | 82 | worker_class = sync |
|
83 | 83 | ## The maximum number of simultaneous clients. Valid only for Gevent |
|
84 | 84 | #worker_connections = 10 |
|
85 | 85 | ## max number of requests that worker will handle before being gracefully |
|
86 | 86 | ## restarted, could prevent memory leaks |
|
87 | 87 | max_requests = 1000 |
|
88 | 88 | max_requests_jitter = 30 |
|
89 | 89 | ## amount of time a worker can spend with handling a request before it |
|
90 | 90 | ## gets killed and restarted. Set to 6hrs |
|
91 | 91 | timeout = 21600 |
|
92 | 92 | |
|
93 | 93 | |
|
94 | 94 | ## prefix middleware for RhodeCode. |
|
95 | 95 | ## recommended when using proxy setup. |
|
96 | 96 | ## allows serving RhodeCode under a URL prefix on the server.
|
97 | 97 | ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. |
|
98 | 98 | ## And set your prefix like: `prefix = /custom_prefix` |
|
99 | 99 | ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need |
|
100 | 100 | ## to make your cookies work only on the prefixed url
|
101 | 101 | [filter:proxy-prefix] |
|
102 | 102 | use = egg:PasteDeploy#prefix |
|
103 | 103 | prefix = / |
|
104 | 104 | |
|
105 | 105 | [app:main] |
|
106 | 106 | use = egg:rhodecode-enterprise-ce |
|
107 | 107 | |
|
108 | 108 | ## enable proxy prefix middleware, defined above |
|
109 | 109 | #filter-with = proxy-prefix |
|
110 | 110 | |
|
111 | 111 | ## encryption key used to encrypt social plugin tokens, |
|
112 | 112 | ## remote_urls with credentials etc. If not set, it defaults to
|
113 | 113 | ## `beaker.session.secret` |
|
114 | 114 | #rhodecode.encrypted_values.secret = |
|
115 | 115 | |
|
116 | 116 | ## decryption strict mode (enabled by default). It controls if decryption raises |
|
117 | 117 | ## `SignatureVerificationError` in case of wrong key, or damaged encryption data. |
|
118 | 118 | #rhodecode.encrypted_values.strict = false |
|
119 | 119 | |
|
120 | 120 | ## return gzipped responses from RhodeCode (static files/application)
|
121 | 121 | gzip_responses = false |
|
122 | 122 | |
|
123 | 123 | ## autogenerate javascript routes file on startup |
|
124 | 124 | generate_js_files = false |
|
125 | 125 | |
|
126 | 126 | ## Optional Languages |
|
127 | 127 | ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh |
|
128 | 128 | lang = en |
|
129 | 129 | |
|
130 | 130 | ## perform a full repository scan on each server start, this should be |
|
131 | 131 | ## set to false after first startup, to allow faster server restarts. |
|
132 | 132 | startup.import_repos = false |
|
133 | 133 | |
|
134 | 134 | ## Uncomment and set this path to use archive download cache. |
|
135 | 135 | ## Once enabled, generated archives will be cached at this location |
|
136 | 136 | ## and served from the cache during subsequent requests for the same archive of |
|
137 | 137 | ## the repository. |
|
138 | 138 | #archive_cache_dir = /tmp/tarballcache |
|
139 | 139 | |
|
140 | 140 | ## URL at which the application is running. This is used for bootstrapping
|
141 | 141 | ## requests in context when no web request is available. Used in ishell, or |
|
142 | 142 | ## SSH calls. Set this so that events receive the proper URL for SSH calls.
|
143 | 143 | app.base_url = http://rhodecode.local |
|
144 | 144 | |
|
145 | 145 | ## change this to unique ID for security |
|
146 | 146 | app_instance_uuid = rc-production |
|
147 | 147 | |
|
148 | 148 | ## cut off limit for large diffs (size in bytes). If the overall diff size of a

149 | 149 | ## commit or pull request exceeds this limit, the diff will be displayed

150 | 150 | ## partially. E.g. 512000 == 512Kb
|
151 | 151 | cut_off_limit_diff = 512000 |
|
152 | 152 | |
|
153 | 153 | ## cut off limit for large files inside diffs (size in bytes). Each individual |
|
154 | 154 | ## file inside a diff that exceeds this limit will be displayed partially.

155 | 155 | ## E.g. 128000 == 128Kb
|
156 | 156 | cut_off_limit_file = 128000 |
|
157 | 157 | |
|
158 | 158 | ## use cached version of scm repo everywhere
|
159 | 159 | vcs_full_cache = true |
|
160 | 160 | |
|
161 | 161 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
162 | 162 | ## Normally this is controlled by proper http flags sent from http server |
|
163 | 163 | force_https = false |
|
164 | 164 | |
|
165 | 165 | ## use Strict-Transport-Security headers |
|
166 | 166 | use_htsts = false |
|
167 | 167 | |
|
168 | 168 | ## number of commits stats will parse on each iteration |
|
169 | 169 | commit_parse_limit = 25 |
|
170 | 170 | |
|
171 | 171 | ## git rev filter option, --all is the default filter, if you need to |
|
172 | 172 | ## hide all refs in changelog switch this to --branches --tags |
|
173 | 173 | git_rev_filter = --branches --tags |
|
174 | 174 | |
|
175 | 175 | # Set to true if your repos are exposed using the dumb protocol |
|
176 | 176 | git_update_server_info = false |
|
177 | 177 | |
|
178 | 178 | ## RSS/ATOM feed options |
|
179 | 179 | rss_cut_off_limit = 256000 |
|
180 | 180 | rss_items_per_page = 10 |
|
181 | 181 | rss_include_diff = false |
|
182 | 182 | |
|
183 | 183 | ## gist URL alias, used to create nicer urls for gists. This should be a

184 | 184 | ## url that rewrites to _admin/gists/{gistid}.
|
185 | 185 | ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal |
|
186 | 186 | ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} |
|
187 | 187 | gist_alias_url = |
|
188 | 188 | |
|
189 | 189 | ## List of views (using glob pattern syntax) that AUTH TOKENS can be

190 | 190 | ## used to access.

191 | 191 | ## Adding ?auth_token=TOKEN_HASH to the url authenticates the request as if it

192 | 192 | ## came from the logged-in user who owns this authentication token.

193 | 193 | ## Additionally, the @TOKEN syntax can be used to bind a view to a specific

194 | 194 | ## authentication token. Such a view would only be accessible when used together

195 | 195 | ## with this authentication token
|
196 | 196 | ## |
|
197 | 197 | ## list of all views can be found under `/_admin/permissions/auth_token_access` |
|
198 | 198 | ## The list should be "," separated and on a single line. |
|
199 | 199 | ## |
|
200 | 200 | ## Most common views to enable: |
|
201 | 201 | # RepoCommitsView:repo_commit_download |
|
202 | 202 | # RepoCommitsView:repo_commit_patch |
|
203 | 203 | # RepoCommitsView:repo_commit_raw |
|
204 | 204 | # RepoCommitsView:repo_commit_raw@TOKEN |
|
205 | 205 | # RepoFilesView:repo_files_diff |
|
206 | 206 | # RepoFilesView:repo_archivefile |
|
207 | 207 | # RepoFilesView:repo_file_raw |
|
208 | 208 | # GistView:* |
|
209 | 209 | api_access_controllers_whitelist = |
|
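To illustrate the `?auth_token=` mechanism described above, a hedged sketch of a request against a whitelisted view; the host, repository path, and token value are placeholders, not real endpoints:

```python
# Sketch: accessing a whitelisted view with an auth token appended to the URL.
# The server URL, repo path and TOKEN_HASH are placeholder values.
import requests

url = 'https://rhodecode.server/myrepo/raw/tip/README.rst'
response = requests.get(url, params={'auth_token': 'TOKEN_HASH'})
print(response.status_code)  # 200 if the view is whitelisted and the token is valid
```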
210 | 210 | |
|
211 | 211 | ## default encoding used to convert from and to unicode |
|
212 | 212 | ## can also be a comma separated list of encodings in case of mixed encodings
|
213 | 213 | default_encoding = UTF-8 |
|
214 | 214 | |
|
215 | 215 | ## instance-id prefix |
|
216 | 216 | ## a prefix key for this instance used for cache invalidation when running |
|
217 | 217 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
218 | 218 | ## all running rhodecode instances. Leave empty if you don't use it |
|
219 | 219 | instance_id = |
|
220 | 220 | |
|
221 | 221 | ## Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
222 | 222 | ## of an authentication plugin even if it is disabled by its settings.
|
223 | 223 | ## This could be useful if you are unable to log in to the system due to broken |
|
224 | 224 | ## authentication settings. Then you can enable e.g. the internal rhodecode auth |
|
225 | 225 | ## module to log in again and fix the settings. |
|
226 | 226 | ## |
|
227 | 227 | ## Available builtin plugin IDs (hash is part of the ID): |
|
228 | 228 | ## egg:rhodecode-enterprise-ce#rhodecode |
|
229 | 229 | ## egg:rhodecode-enterprise-ce#pam |
|
230 | 230 | ## egg:rhodecode-enterprise-ce#ldap |
|
231 | 231 | ## egg:rhodecode-enterprise-ce#jasig_cas |
|
232 | 232 | ## egg:rhodecode-enterprise-ce#headers |
|
233 | 233 | ## egg:rhodecode-enterprise-ce#crowd |
|
234 | 234 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
235 | 235 | |
|
236 | 236 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
237 | 237 | ## response is 401 HTTPUnauthorized. Currently HG clients have trouble

238 | 238 | ## handling that, causing a series of failed authentication calls.

239 | 239 | ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code

240 | 240 | ## This will be served instead of the default 401 on bad authentication
|
241 | 241 | auth_ret_code = |
|
242 | 242 | |
|
243 | 243 | ## use special detection method when serving auth_ret_code, instead of serving |
|
244 | 244 | ## ret_code directly, use 401 initially (which triggers a credentials prompt)
|
245 | 245 | ## and then serve auth_ret_code to clients |
|
246 | 246 | auth_ret_code_detection = false |
|
247 | 247 | |
|
248 | 248 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
249 | 249 | ## codes don't break the transactions while 4XX codes do |
|
250 | 250 | lock_ret_code = 423 |
|
251 | 251 | |
|
252 | 252 | ## allows changing the repository location in the settings page
|
253 | 253 | allow_repo_location_change = true |
|
254 | 254 | |
|
255 | 255 | ## allows setting up custom hooks in the settings page
|
256 | 256 | allow_custom_hooks_settings = true |
|
257 | 257 | |
|
258 | 258 | ## generated license token, go to the license page in RhodeCode settings to obtain

259 | 259 | ## a new token
|
260 | 260 | license_token = |
|
261 | 261 | |
|
262 | 262 | ## supervisor connection uri, for managing supervisor and logs. |
|
263 | 263 | supervisor.uri = |
|
264 | 264 | ## supervisord group name/id that we want this RC instance to handle
|
265 | 265 | supervisor.group_id = prod |
|
266 | 266 | |
|
267 | 267 | ## Display extended labs settings |
|
268 | 268 | labs_settings_active = true |
|
269 | 269 | |
|
270 | 270 | #################################### |
|
271 | 271 | ### CELERY CONFIG #### |
|
272 | 272 | #################################### |
|
273 | 273 | use_celery = false |
|
274 | broker.host = localhost | |
|
275 | broker.vhost = rabbitmqhost | |
|
276 | broker.port = 5672 | |
|
277 | broker.user = rabbitmq | |
|
278 | broker.password = qweqwe | |
|
279 | ||
|
280 | celery.imports = rhodecode.lib.celerylib.tasks | |
|
281 | 274 | |
|
282 | celery.result.backend = amqp | |
|
283 | celery.result.dburi = amqp:// | |
|
284 | celery.result.serialier = json | |
|
275 | # connection url to the message broker (default rabbitmq) | |
|
276 | celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost | |
|
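The single `celery.broker_url` above replaces the removed `broker.*` keys; a minimal sketch of the mapping, assuming the standard `amqp://user:password@host:port/vhost` layout:

```python
# Sketch: compose the new celery.broker_url from the old broker.* settings.
old = {
    'broker.host': 'localhost',
    'broker.vhost': 'rabbitmqhost',
    'broker.port': '5672',
    'broker.user': 'rabbitmq',
    'broker.password': 'qweqwe',
}

broker_url = 'amqp://{user}:{password}@{host}:{port}/{vhost}'.format(
    user=old['broker.user'], password=old['broker.password'],
    host=old['broker.host'], port=old['broker.port'],
    vhost=old['broker.vhost'])

# matches the value set in the diff above
assert broker_url == 'amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost'
```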
285 | 277 | |
|
286 | #celery.send.task.error.emails = true | |
|
287 | #celery.amqp.task.result.expires = 18000 | |
|
288 | ||
|
289 | celeryd.concurrency = 2 | |
|
290 | #celeryd.log.file = celeryd.log | |
|
291 | celeryd.log.level = debug | |
|
292 | celeryd.max.tasks.per.child = 1 | |
|
278 | # maximum tasks to execute before worker restart | |
|
279 | celery.max_tasks_per_child = 100 | |
|
293 | 280 | |
|
294 | 281 | ## tasks will never be sent to the queue, but executed locally instead. |
|
295 | celery.always.eager = false | 

282 | celery.task_always_eager = false | 
|
296 | 283 | |
|
297 | 284 | #################################### |
|
298 | 285 | ### BEAKER CACHE #### |
|
299 | 286 | #################################### |
|
300 | 287 | ## default cache dir for templates. Putting this into a ramdisk
|
301 | 288 | ## can boost performance, eg. %(here)s/data_ramdisk |
|
302 | 289 | cache_dir = %(here)s/data |
|
303 | 290 | |
|
304 | 291 | ## locking and default file storage for Beaker. Putting this into a ramdisk |
|
305 | 292 | ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data |
|
306 | 293 | beaker.cache.data_dir = %(here)s/data/cache/beaker_data |
|
307 | 294 | beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock |
|
308 | 295 | |
|
309 | 296 | beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long |
|
310 | 297 | |
|
311 | 298 | beaker.cache.super_short_term.type = memory |
|
312 | 299 | beaker.cache.super_short_term.expire = 10 |
|
313 | 300 | beaker.cache.super_short_term.key_length = 256 |
|
314 | 301 | |
|
315 | 302 | beaker.cache.short_term.type = memory |
|
316 | 303 | beaker.cache.short_term.expire = 60 |
|
317 | 304 | beaker.cache.short_term.key_length = 256 |
|
318 | 305 | |
|
319 | 306 | beaker.cache.long_term.type = memory |
|
320 | 307 | beaker.cache.long_term.expire = 36000 |
|
321 | 308 | beaker.cache.long_term.key_length = 256 |
|
322 | 309 | |
|
323 | 310 | beaker.cache.sql_cache_short.type = memory |
|
324 | 311 | beaker.cache.sql_cache_short.expire = 10 |
|
325 | 312 | beaker.cache.sql_cache_short.key_length = 256 |
|
326 | 313 | |
|
327 | 314 | ## default is memory cache, configure only if required |
|
328 | 315 | ## using multi-node or multi-worker setup |
|
329 | 316 | #beaker.cache.auth_plugins.type = ext:database |
|
330 | 317 | #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock |
|
331 | 318 | #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode |
|
332 | 319 | #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode |
|
333 | 320 | #beaker.cache.auth_plugins.sa.pool_recycle = 3600 |
|
334 | 321 | #beaker.cache.auth_plugins.sa.pool_size = 10 |
|
335 | 322 | #beaker.cache.auth_plugins.sa.max_overflow = 0 |
|
336 | 323 | |
|
337 | 324 | beaker.cache.repo_cache_long.type = memorylru_base |
|
338 | 325 | beaker.cache.repo_cache_long.max_items = 4096 |
|
339 | 326 | beaker.cache.repo_cache_long.expire = 2592000 |
|
340 | 327 | |
|
341 | 328 | ## default is memorylru_base cache, configure only if required |
|
342 | 329 | ## using multi-node or multi-worker setup |
|
343 | 330 | #beaker.cache.repo_cache_long.type = ext:memcached |
|
344 | 331 | #beaker.cache.repo_cache_long.url = localhost:11211 |
|
345 | 332 | #beaker.cache.repo_cache_long.expire = 1209600 |
|
346 | 333 | #beaker.cache.repo_cache_long.key_length = 256 |
|
347 | 334 | |
|
348 | 335 | #################################### |
|
349 | 336 | ### BEAKER SESSION #### |
|
350 | 337 | #################################### |
|
351 | 338 | |
|
352 | 339 | ## .session.type is the type of storage used for the session; currently allowed
|
353 | 340 | ## types are file, ext:memcached, ext:database, and memory (default). |
|
354 | 341 | beaker.session.type = file |
|
355 | 342 | beaker.session.data_dir = %(here)s/data/sessions/data |
|
356 | 343 | |
|
357 | 344 | ## db based session, fast, and allows easy management of logged-in users
|
358 | 345 | #beaker.session.type = ext:database |
|
359 | 346 | #beaker.session.table_name = db_session |
|
360 | 347 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode |
|
361 | 348 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode |
|
362 | 349 | #beaker.session.sa.pool_recycle = 3600 |
|
363 | 350 | #beaker.session.sa.echo = false |
|
364 | 351 | |
|
365 | 352 | beaker.session.key = rhodecode |
|
366 | 353 | beaker.session.secret = production-rc-uytcxaz |
|
367 | 354 | beaker.session.lock_dir = %(here)s/data/sessions/lock |
|
368 | 355 | |
|
369 | 356 | ## Secure encrypted cookie. Requires the AES python libraries.

370 | 357 | ## You must disable beaker.session.secret to use this.
|
371 | 358 | #beaker.session.encrypt_key = key_for_encryption |
|
372 | 359 | #beaker.session.validate_key = validation_key |
|
373 | 360 | |
|
374 | 361 | ## sets the session as invalid (also logging out the user) if it has not been

375 | 362 | ## accessed for the given amount of time in seconds
|
376 | 363 | beaker.session.timeout = 2592000 |
|
377 | 364 | beaker.session.httponly = true |
|
378 | 365 | ## Path to use for the cookie. Set to prefix if you use prefix middleware |
|
379 | 366 | #beaker.session.cookie_path = /custom_prefix |
|
380 | 367 | |
|
381 | 368 | ## uncomment for https secure cookie |
|
382 | 369 | beaker.session.secure = false |
|
383 | 370 | |
|
384 | 371 | ## auto save the session so you do not need to call .save()
|
385 | 372 | beaker.session.auto = false |
|
386 | 373 | |
|
387 | 374 | ## default cookie expiration time in seconds, set to `true` to expire
|
388 | 375 | ## at browser close |
|
389 | 376 | #beaker.session.cookie_expires = 3600 |
|
390 | 377 | |
|
391 | 378 | ################################### |
|
392 | 379 | ## SEARCH INDEXING CONFIGURATION ## |
|
393 | 380 | ################################### |
|
394 | 381 | ## Full text search indexer is available in rhodecode-tools under |
|
395 | 382 | ## `rhodecode-tools index` command |
|
396 | 383 | |
|
397 | 384 | ## WHOOSH Backend, doesn't require additional services to run |
|
398 | 385 | ## it works well with a few dozen repos
|
399 | 386 | search.module = rhodecode.lib.index.whoosh |
|
400 | 387 | search.location = %(here)s/data/index |
|
401 | 388 | |
|
402 | 389 | ######################################## |
|
403 | 390 | ### CHANNELSTREAM CONFIG #### |
|
404 | 391 | ######################################## |
|
405 | 392 | ## channelstream enables persistent connections and live notifications
|
406 | 393 | ## in the system. It's also used by the chat system |
|
407 | 394 | channelstream.enabled = false |
|
408 | 395 | |
|
409 | 396 | ## server address for channelstream server on the backend |
|
410 | 397 | channelstream.server = 127.0.0.1:9800 |
|
411 | 398 | |
|
412 | 399 | ## location of the channelstream server from outside world |
|
413 | 400 | ## use ws:// for http or wss:// for https. This address needs to be handled |
|
414 | 401 | ## by external HTTP server such as Nginx or Apache |
|
415 | 402 | ## see nginx/apache configuration examples in our docs |
|
416 | 403 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream |
|
417 | 404 | channelstream.secret = secret |
|
418 | 405 | channelstream.history.location = %(here)s/channelstream_history |
|
419 | 406 | |
|
420 | 407 | ## Internal application path that Javascript uses to connect to.
|
421 | 408 | ## If you use proxy-prefix the prefix should be added before /_channelstream |
|
422 | 409 | channelstream.proxy_path = /_channelstream |
|
423 | 410 | |
|
424 | 411 | |
|
425 | 412 | ################################### |
|
426 | 413 | ## APPENLIGHT CONFIG ## |
|
427 | 414 | ################################### |
|
428 | 415 | |
|
429 | 416 | ## Appenlight is tailored to work with RhodeCode, see |
|
430 | 417 | ## http://appenlight.com for details on how to obtain an account
|
431 | 418 | |
|
432 | 419 | ## appenlight integration enabled |
|
433 | 420 | appenlight = false |
|
434 | 421 | |
|
435 | 422 | appenlight.server_url = https://api.appenlight.com |
|
436 | 423 | appenlight.api_key = YOUR_API_KEY |
|
437 | 424 | #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 |
|
438 | 425 | |
|
439 | 426 | # used for JS client |
|
440 | 427 | appenlight.api_public_key = YOUR_API_PUBLIC_KEY |
|
441 | 428 | |
|
442 | 429 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
443 | 430 | |
|
444 | 431 | ## enables 404 error logging (default False) |
|
445 | 432 | appenlight.report_404 = false |
|
446 | 433 | |
|
447 | 434 | ## time in seconds after which a request is considered slow (default 1)
|
448 | 435 | appenlight.slow_request_time = 1 |
|
449 | 436 | |
|
450 | 437 | ## record slow requests in application |
|
451 | 438 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
452 | 439 | appenlight.slow_requests = true |
|
453 | 440 | |
|
454 | 441 | ## enable hooking to application loggers |
|
455 | 442 | appenlight.logging = true |
|
456 | 443 | |
|
457 | 444 | ## minimum log level for log capture |
|
458 | 445 | appenlight.logging.level = WARNING |
|
459 | 446 | |
|
460 | 447 | ## send logs only from erroneous/slow requests |
|
461 | 448 | ## (saves API quota for intensive logging) |
|
462 | 449 | appenlight.logging_on_error = false |
|
463 | 450 | |
|
464 | 451 | ## list of additional keywords that should be grabbed from environ object
|
465 | 452 | ## can be string with comma separated list of words in lowercase |
|
466 | 453 | ## (by default client will always send following info: |
|
467 | 454 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
468 | 455 | ## start with HTTP*); this list can be extended with additional keywords here
|
469 | 456 | appenlight.environ_keys_whitelist = |
|
470 | 457 | |
|
471 | 458 | ## list of keywords that should be blanked from request object |
|
472 | 459 | ## can be string with comma separated list of words in lowercase |
|
473 | 460 | ## (by default client will always blank keys that contain following words |
|
474 | 461 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
475 | 462 | ## this list can be extended with additional keywords set here
|
476 | 463 | appenlight.request_keys_blacklist = |
|
477 | 464 | |
|
478 | 465 | ## list of namespaces that should be ignored when gathering log entries
|
479 | 466 | ## can be string with comma separated list of namespaces |
|
480 | 467 | ## (by default the client ignores own entries: appenlight_client.client) |
|
481 | 468 | appenlight.log_namespace_blacklist = |
|
482 | 469 | |
|
483 | 470 | |
|
484 | 471 | ################################################################################ |
|
485 | 472 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
486 | 473 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
487 | 474 | ## execute malicious code after an exception is raised. ## |
|
488 | 475 | ################################################################################ |
|
489 | 476 | set debug = false |
|
490 | 477 | |
|
491 | 478 | |
|
492 | 479 | ########################################### |
|
493 | 480 | ### MAIN RHODECODE DATABASE CONFIG ### |
|
494 | 481 | ########################################### |
|
495 | 482 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 |
|
496 | 483 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
497 | 484 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode |
|
498 | 485 | sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
499 | 486 | |
|
500 | 487 | # see sqlalchemy docs for other advanced settings |
|
501 | 488 | |
|
502 | 489 | ## print the sql statements to output |
|
503 | 490 | sqlalchemy.db1.echo = false |
|
504 | 491 | ## recycle the connections after this amount of seconds |
|
505 | 492 | sqlalchemy.db1.pool_recycle = 3600 |
|
506 | 493 | sqlalchemy.db1.convert_unicode = true |
|
507 | 494 | |
|
508 | 495 | ## the number of connections to keep open inside the connection pool. |
|
509 | 496 | ## 0 indicates no limit |
|
510 | 497 | #sqlalchemy.db1.pool_size = 5 |
|
511 | 498 | |
|
512 | 499 | ## the number of connections to allow in connection pool "overflow", that is |
|
513 | 500 | ## connections that can be opened above and beyond the pool_size setting, |
|
514 | 501 | ## which defaults to five. |
|
515 | 502 | #sqlalchemy.db1.max_overflow = 10 |
|
516 | 503 | |
|
517 | 504 | |
|
518 | 505 | ################## |
|
519 | 506 | ### VCS CONFIG ### |
|
520 | 507 | ################## |
|
521 | 508 | vcs.server.enable = true |
|
522 | 509 | vcs.server = localhost:9900 |
|
523 | 510 | |
|
524 | 511 | ## Web server connectivity protocol, responsible for web based VCS operations
|
525 | 512 | ## Available protocols are: |
|
526 | 513 | ## `http` - use http-rpc backend (default) |
|
527 | 514 | vcs.server.protocol = http |
|
528 | 515 | |
|
529 | 516 | ## Push/Pull operations protocol, available options are: |
|
530 | 517 | ## `http` - use http-rpc backend (default) |
|
531 | 518 | ## |
|
532 | 519 | vcs.scm_app_implementation = http |
|
533 | 520 | |
|
534 | 521 | ## Push/Pull operations hooks protocol, available options are: |
|
535 | 522 | ## `http` - use http-rpc backend (default) |
|
536 | 523 | vcs.hooks.protocol = http |
|
537 | 524 | |
|
538 | 525 | vcs.server.log_level = info |
|
539 | 526 | ## Start VCSServer with this instance as a subprocess, useful for development
|
540 | 527 | vcs.start_server = false |
|
541 | 528 | |
|
542 | 529 | ## List of enabled VCS backends, available options are: |
|
543 | 530 | ## `hg` - mercurial |
|
544 | 531 | ## `git` - git |
|
545 | 532 | ## `svn` - subversion |
|
546 | 533 | vcs.backends = hg, git, svn |
|
547 | 534 | |
|
548 | 535 | vcs.connection_timeout = 3600 |
|
549 | 536 | ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out. |
|
550 | 537 | ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible |
|
551 | 538 | #vcs.svn.compatible_version = pre-1.8-compatible |
|
552 | 539 | |
|
553 | 540 | |
|
554 | 541 | ############################################################ |
|
555 | 542 | ### Subversion proxy support (mod_dav_svn) ### |
|
556 | 543 | ### Maps RhodeCode repo groups into SVN paths for Apache ### |
|
557 | 544 | ############################################################ |
|
558 | 545 | ## Enable or disable the config file generation. |
|
559 | 546 | svn.proxy.generate_config = false |
|
560 | 547 | ## Generate config file with `SVNListParentPath` set to `On`. |
|
561 | 548 | svn.proxy.list_parent_path = true |
|
562 | 549 | ## Set location and file name of generated config file. |
|
563 | 550 | svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf |
|
564 | 551 | ## alternative mod_dav config template. This needs to be a mako template |
|
565 | 552 | #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako |
|
566 | 553 | ## Used as a prefix to the `Location` block in the generated config file. |
|
567 | 554 | ## In most cases it should be set to `/`. |
|
568 | 555 | svn.proxy.location_root = / |
|
569 | 556 | ## Command to reload the mod dav svn configuration on change. |
|
570 | 557 | ## Example: `/etc/init.d/apache2 reload` |
|
571 | 558 | #svn.proxy.reload_cmd = /etc/init.d/apache2 reload |
|
572 | 559 | ## If the timeout expires before the reload command finishes, the command will |
|
573 | 560 | ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds. |
|
574 | 561 | #svn.proxy.reload_timeout = 10 |
|
575 | 562 | |
|
576 | 563 | ############################################################ |
|
577 | 564 | ### SSH Support Settings ### |
|
578 | 565 | ############################################################ |
|
579 | 566 | |
|
580 | 567 | ## Defines if a custom authorized_keys file should be created and written on |
|
581 | 568 | ## any change of user ssh keys. Setting this to false also disables the possibility

582 | 569 | ## of users adding SSH keys from the web interface. Super admins can still
|
583 | 570 | ## manage SSH Keys. |
|
584 | 571 | ssh.generate_authorized_keyfile = false |
|
585 | 572 | |
|
586 | 573 | ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` |
|
587 | 574 | # ssh.authorized_keys_ssh_opts = |
|
588 | 575 | |
|
589 | 576 | ## Path to the authorized_keys file where the generated entries are placed.
|
590 | 577 | ## It is possible to have multiple key files specified in `sshd_config` e.g. |
|
591 | 578 | ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode |
|
592 | 579 | ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode |
|
593 | 580 | |
|
594 | 581 | ## Command to execute the SSH wrapper. The binary is available in the |
|
595 | 582 | ## rhodecode installation directory. |
|
596 | 583 | ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper |
|
597 | 584 | ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper |
|
598 | 585 | |
|
599 | 586 | ## Allow shell when executing the ssh-wrapper command |
|
600 | 587 | ssh.wrapper_cmd_allow_shell = false |
|
601 | 588 | |
|
602 | 589 | ## Enables logging and detailed output sent back to the client during SSH

603 | 590 | ## operations. Useful for debugging; shouldn't be used in production.
|
604 | 591 | ssh.enable_debug_logging = false |
|
605 | 592 | |
|
606 | 593 | ## Paths to binary executables; by default they are just the names, but we can

607 | 594 | ## override them if we want to use custom ones
|
608 | 595 | ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg |
|
609 | 596 | ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git |
|
610 | 597 | ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve |
|
611 | 598 | |
|
612 | 599 | |
|
613 | 600 | ## Dummy marker to add new entries after. |
|
614 | 601 | ## Add any custom entries below. Please don't remove. |
|
615 | 602 | custom.conf = 1 |
|
616 | 603 | |
|
617 | 604 | |
|
618 | 605 | ################################ |
|
619 | 606 | ### LOGGING CONFIGURATION #### |
|
620 | 607 | ################################ |
|
621 | 608 | [loggers] |
|
622 | keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper | |
|
609 | keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery | |
|
623 | 610 | |
|
624 | 611 | [handlers] |
|
625 | 612 | keys = console, console_sql |
|
626 | 613 | |
|
627 | 614 | [formatters] |
|
628 | 615 | keys = generic, color_formatter, color_formatter_sql |
|
629 | 616 | |
|
630 | 617 | ############# |
|
631 | 618 | ## LOGGERS ## |
|
632 | 619 | ############# |
|
633 | 620 | [logger_root] |
|
634 | 621 | level = NOTSET |
|
635 | 622 | handlers = console |
|
636 | 623 | |
|
637 | 624 | [logger_sqlalchemy] |
|
638 | 625 | level = INFO |
|
639 | 626 | handlers = console_sql |
|
640 | 627 | qualname = sqlalchemy.engine |
|
641 | 628 | propagate = 0 |
|
642 | 629 | |
|
643 | 630 | [logger_beaker] |
|
644 | 631 | level = DEBUG |
|
645 | 632 | handlers = |
|
646 | 633 | qualname = beaker.container |
|
647 | 634 | propagate = 1 |
|
648 | 635 | |
|
649 | 636 | [logger_rhodecode] |
|
650 | 637 | level = DEBUG |
|
651 | 638 | handlers = |
|
652 | 639 | qualname = rhodecode |
|
653 | 640 | propagate = 1 |
|
654 | 641 | |
|
655 | 642 | [logger_ssh_wrapper] |
|
656 | 643 | level = DEBUG |
|
657 | 644 | handlers = |
|
658 | 645 | qualname = ssh_wrapper |
|
659 | 646 | propagate = 1 |
|
660 | 647 | |
|
648 | [logger_celery] | |
|
649 | level = DEBUG | |
|
650 | handlers = | |
|
651 | qualname = celery | |
|
652 | ||
|
661 | 653 | |
|
662 | 654 | ############## |
|
663 | 655 | ## HANDLERS ## |
|
664 | 656 | ############## |
|
665 | 657 | |
|
666 | 658 | [handler_console] |
|
667 | 659 | class = StreamHandler |
|
668 | 660 | args = (sys.stderr, ) |
|
669 | 661 | level = INFO |
|
670 | 662 | formatter = generic |
|
671 | 663 | |
|
672 | 664 | [handler_console_sql] |
|
673 | 665 | class = StreamHandler |
|
674 | 666 | args = (sys.stderr, ) |
|
675 | 667 | level = WARN |
|
676 | 668 | formatter = generic |
|
677 | 669 | |
|
678 | 670 | ################ |
|
679 | 671 | ## FORMATTERS ## |
|
680 | 672 | ################ |
|
681 | 673 | |
|
682 | 674 | [formatter_generic] |
|
683 | 675 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter |
|
684 | 676 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
685 | 677 | datefmt = %Y-%m-%d %H:%M:%S |
|
686 | 678 | |
|
687 | 679 | [formatter_color_formatter] |
|
688 | 680 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
689 | 681 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
690 | 682 | datefmt = %Y-%m-%d %H:%M:%S |
|
691 | 683 | |
|
692 | 684 | [formatter_color_formatter_sql] |
|
693 | 685 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
694 | 686 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
695 | 687 | datefmt = %Y-%m-%d %H:%M:%S |
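The `generic` format/datefmt pair above can be reproduced with the standard library; a minimal sketch of what a resulting log line looks like, using a plain `logging.Formatter` in place of RhodeCode's `ExceptionAwareFormatter`:

```python
# Sketch: the format/datefmt pair above applied with a stdlib Formatter.
import logging
import sys

handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(logging.Formatter(
    fmt='%(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'))

log = logging.getLogger('rhodecode.demo')
log.addHandler(handler)
log.setLevel(logging.INFO)
log.info('started')
# -> 2017-01-01 12:00:00.123 INFO  [rhodecode.demo] started
```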
@@ -1,2067 +1,2063 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import time |
|
23 | 23 | |
|
24 | 24 | import rhodecode |
|
25 | 25 | from rhodecode.api import ( |
|
26 | 26 | jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) |
|
27 | 27 | from rhodecode.api.utils import ( |
|
28 | 28 | has_superadmin_permission, Optional, OAttr, get_repo_or_error, |
|
29 | 29 | get_user_group_or_error, get_user_or_error, validate_repo_permissions, |
|
30 | 30 | get_perm_or_error, parse_args, get_origin, build_commit_data, |
|
31 | 31 | validate_set_owner_permissions) |
|
32 | 32 | from rhodecode.lib import audit_logger |
|
33 | 33 | from rhodecode.lib import repo_maintenance |
|
34 | 34 | from rhodecode.lib.auth import HasPermissionAnyApi, HasUserGroupPermissionAnyApi |
|
35 | from rhodecode.lib.celerylib.utils import get_task_id | |
|
35 | 36 | from rhodecode.lib.utils2 import str2bool, time_to_datetime |
|
36 | 37 | from rhodecode.lib.ext_json import json |
|
37 | 38 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError |
|
38 | 39 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
39 | 40 | from rhodecode.model.comment import CommentsModel |
|
40 | 41 | from rhodecode.model.db import ( |
|
41 | 42 | Session, ChangesetStatus, RepositoryField, Repository, RepoGroup, |
|
42 | 43 | ChangesetComment) |
|
43 | 44 | from rhodecode.model.repo import RepoModel |
|
44 | 45 | from rhodecode.model.scm import ScmModel, RepoList |
|
45 | 46 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel |
|
46 | 47 | from rhodecode.model import validation_schema |
|
47 | 48 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
48 | 49 | |
|
49 | 50 | log = logging.getLogger(__name__) |
|
50 | 51 | |
|
51 | 52 | |
|
52 | 53 | @jsonrpc_method() |
|
53 | 54 | def get_repo(request, apiuser, repoid, cache=Optional(True)): |
|
54 | 55 | """ |
|
55 | 56 | Gets an existing repository by its name or repository_id. |
|
56 | 57 | |
|
57 | 58 | The members section so the output returns users groups or users |
|
58 | 59 | associated with that repository. |
|
59 | 60 | |
|
60 | 61 | This command can only be run using an |authtoken| with admin rights, |
|
61 | 62 | or users with at least read rights to the |repo|. |
|
62 | 63 | |
|
63 | 64 | :param apiuser: This is filled automatically from the |authtoken|. |
|
64 | 65 | :type apiuser: AuthUser |
|
65 | 66 | :param repoid: The repository name or repository id. |
|
66 | 67 | :type repoid: str or int |
|
67 | 68 | :param cache: use the cached value for last changeset |
|
68 | 69 | :type: cache: Optional(bool) |
|
69 | 70 | |
|
70 | 71 | Example output: |
|
71 | 72 | |
|
72 | 73 | .. code-block:: bash |
|
73 | 74 | |
|
74 | 75 | { |
|
75 | 76 | "error": null, |
|
76 | 77 | "id": <repo_id>, |
|
77 | 78 | "result": { |
|
78 | 79 | "clone_uri": null, |
|
79 | 80 | "created_on": "timestamp", |
|
80 | 81 | "description": "repo description", |
|
81 | 82 | "enable_downloads": false, |
|
82 | 83 | "enable_locking": false, |
|
83 | 84 | "enable_statistics": false, |
|
84 | 85 | "followers": [ |
|
85 | 86 | { |
|
86 | 87 | "active": true, |
|
87 | 88 | "admin": false, |
|
88 | 89 | "api_key": "****************************************", |
|
89 | 90 | "api_keys": [ |
|
90 | 91 | "****************************************" |
|
91 | 92 | ], |
|
92 | 93 | "email": "user@example.com", |
|
93 | 94 | "emails": [ |
|
94 | 95 | "user@example.com" |
|
95 | 96 | ], |
|
96 | 97 | "extern_name": "rhodecode", |
|
97 | 98 | "extern_type": "rhodecode", |
|
98 | 99 | "firstname": "username", |
|
99 | 100 | "ip_addresses": [], |
|
100 | 101 | "language": null, |
|
101 | 102 | "last_login": "2015-09-16T17:16:35.854", |
|
102 | 103 | "lastname": "surname", |
|
103 | 104 | "user_id": <user_id>, |
|
104 | 105 | "username": "name" |
|
105 | 106 | } |
|
106 | 107 | ], |
|
107 | 108 | "fork_of": "parent-repo", |
|
108 | 109 | "landing_rev": [ |
|
109 | 110 | "rev", |
|
110 | 111 | "tip" |
|
111 | 112 | ], |
|
112 | 113 | "last_changeset": { |
|
113 | 114 | "author": "User <user@example.com>", |
|
114 | 115 | "branch": "default", |
|
115 | 116 | "date": "timestamp", |
|
116 | 117 | "message": "last commit message", |
|
117 | 118 | "parents": [ |
|
118 | 119 | { |
|
119 | 120 | "raw_id": "commit-id" |
|
120 | 121 | } |
|
121 | 122 | ], |
|
122 | 123 | "raw_id": "commit-id", |
|
123 | 124 | "revision": <revision number>, |
|
124 | 125 | "short_id": "short id" |
|
125 | 126 | }, |
|
126 | 127 | "lock_reason": null, |
|
127 | 128 | "locked_by": null, |
|
128 | 129 | "locked_date": null, |
|
129 | 130 | "members": [ |
|
130 | 131 | { |
|
131 | 132 | "name": "super-admin-name", |
|
132 | 133 | "origin": "super-admin", |
|
133 | 134 | "permission": "repository.admin", |
|
134 | 135 | "type": "user" |
|
135 | 136 | }, |
|
136 | 137 | { |
|
137 | 138 | "name": "owner-name", |
|
138 | 139 | "origin": "owner", |
|
139 | 140 | "permission": "repository.admin", |
|
140 | 141 | "type": "user" |
|
141 | 142 | }, |
|
142 | 143 | { |
|
143 | 144 | "name": "user-group-name", |
|
144 | 145 | "origin": "permission", |
|
145 | 146 | "permission": "repository.write", |
|
146 | 147 | "type": "user_group" |
|
147 | 148 | } |
|
148 | 149 | ], |
|
149 | 150 | "owner": "owner-name", |
|
150 | 151 | "permissions": [ |
|
151 | 152 | { |
|
152 | 153 | "name": "super-admin-name", |
|
153 | 154 | "origin": "super-admin", |
|
154 | 155 | "permission": "repository.admin", |
|
155 | 156 | "type": "user" |
|
156 | 157 | }, |
|
157 | 158 | { |
|
158 | 159 | "name": "owner-name", |
|
159 | 160 | "origin": "owner", |
|
160 | 161 | "permission": "repository.admin", |
|
161 | 162 | "type": "user" |
|
162 | 163 | }, |
|
163 | 164 | { |
|
164 | 165 | "name": "user-group-name", |
|
165 | 166 | "origin": "permission", |
|
166 | 167 | "permission": "repository.write", |
|
167 | 168 | "type": "user_group" |
|
168 | 169 | } |
|
169 | 170 | ], |
|
170 | 171 | "private": true, |
|
171 | 172 | "repo_id": 676, |
|
172 | 173 | "repo_name": "user-group/repo-name", |
|
173 | 174 | "repo_type": "hg" |
|
174 | 175 | } |
|
175 | 176 | } |
|
176 | 177 | """ |
|
177 | 178 | |
|
178 | 179 | repo = get_repo_or_error(repoid) |
|
179 | 180 | cache = Optional.extract(cache) |
|
180 | 181 | |
|
181 | 182 | include_secrets = False |
|
182 | 183 | if has_superadmin_permission(apiuser): |
|
183 | 184 | include_secrets = True |
|
184 | 185 | else: |
|
185 | 186 | # check if we have at least read permission for this repo ! |
|
186 | 187 | _perms = ( |
|
187 | 188 | 'repository.admin', 'repository.write', 'repository.read',) |
|
188 | 189 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
189 | 190 | |
|
190 | 191 | permissions = [] |
|
191 | 192 | for _user in repo.permissions(): |
|
192 | 193 | user_data = { |
|
193 | 194 | 'name': _user.username, |
|
194 | 195 | 'permission': _user.permission, |
|
195 | 196 | 'origin': get_origin(_user), |
|
196 | 197 | 'type': "user", |
|
197 | 198 | } |
|
198 | 199 | permissions.append(user_data) |
|
199 | 200 | |
|
200 | 201 | for _user_group in repo.permission_user_groups(): |
|
201 | 202 | user_group_data = { |
|
202 | 203 | 'name': _user_group.users_group_name, |
|
203 | 204 | 'permission': _user_group.permission, |
|
204 | 205 | 'origin': get_origin(_user_group), |
|
205 | 206 | 'type': "user_group", |
|
206 | 207 | } |
|
207 | 208 | permissions.append(user_group_data) |
|
208 | 209 | |
|
209 | 210 | following_users = [ |
|
210 | 211 | user.user.get_api_data(include_secrets=include_secrets) |
|
211 | 212 | for user in repo.followers] |
|
212 | 213 | |
|
213 | 214 | if not cache: |
|
214 | 215 | repo.update_commit_cache() |
|
215 | 216 | data = repo.get_api_data(include_secrets=include_secrets) |
|
216 | 217 | data['members'] = permissions # TODO: this should be deprecated soon |
|
217 | 218 | data['permissions'] = permissions |
|
218 | 219 | data['followers'] = following_users |
|
219 | 220 | return data |
|
220 | 221 | |
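As a usage note, `get_repo` (like every `@jsonrpc_method` here) is invoked over RhodeCode's JSON-RPC endpoint; a hedged sketch of a call, with the server URL, auth token, and repository name as placeholders:

```python
# Sketch: calling get_repo over JSON-RPC. SERVER_URL, AUTH_TOKEN and the
# repository name are placeholders for a real instance.
import requests

payload = {
    'id': 1,
    'auth_token': 'AUTH_TOKEN',
    'method': 'get_repo',
    'args': {'repoid': 'myrepo', 'cache': True},
}
response = requests.post('https://SERVER_URL/_admin/api', json=payload)
print(response.json()['result']['repo_name'])
```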
|
221 | 222 | |
|
222 | 223 | @jsonrpc_method() |
|
223 | 224 | def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)): |
|
224 | 225 | """ |
|
225 | 226 | Lists all existing repositories. |
|
226 | 227 | |
|
227 | 228 | This command can only be run using an |authtoken| with admin rights, |
|
228 | 229 | or users with at least read rights to |repos|. |
|
229 | 230 | |
|
230 | 231 | :param apiuser: This is filled automatically from the |authtoken|. |
|
231 | 232 | :type apiuser: AuthUser |
|
232 | 233 | :param root: specify root repository group to fetch repositories. |
|
233 | 234 | filters the returned repositories to be members of given root group. |
|
234 | 235 | :type root: Optional(None) |
|
235 | 236 | :param traverse: traverse given root into subrepositories. With this flag |
|
236 | 237 | set to False, it will only return top-level repositories from `root`. |
|
237 | 238 | if root is empty it will return just top-level repositories. |
|
238 | 239 | :type traverse: Optional(True) |
|
239 | 240 | |
|
240 | 241 | |
|
241 | 242 | Example output: |
|
242 | 243 | |
|
243 | 244 | .. code-block:: bash |
|
244 | 245 | |
|
245 | 246 | id : <id_given_in_input> |
|
246 | 247 | result: [ |
|
247 | 248 | { |
|
248 | 249 | "repo_id" : "<repo_id>", |
|
249 | 250 | "repo_name" : "<reponame>" |
|
250 | 251 | "repo_type" : "<repo_type>", |
|
251 | 252 | "clone_uri" : "<clone_uri>", |
|
252 | 253 | "private": : "<bool>", |
|
253 | 254 | "created_on" : "<datetimecreated>", |
|
254 | 255 | "description" : "<description>", |
|
255 | 256 | "landing_rev": "<landing_rev>", |
|
256 | 257 | "owner": "<repo_owner>", |
|
257 | 258 | "fork_of": "<name_of_fork_parent>", |
|
258 | 259 | "enable_downloads": "<bool>", |
|
259 | 260 | "enable_locking": "<bool>", |
|
260 | 261 | "enable_statistics": "<bool>", |
|
261 | 262 | }, |
|
262 | 263 | ... |
|
263 | 264 | ] |
|
264 | 265 | error: null |
|
265 | 266 | """ |
|
266 | 267 | |
|
267 | 268 | include_secrets = has_superadmin_permission(apiuser) |
|
268 | 269 | _perms = ('repository.read', 'repository.write', 'repository.admin',) |
|
269 | 270 | extras = {'user': apiuser} |
|
270 | 271 | |
|
271 | 272 | root = Optional.extract(root) |
|
272 | 273 | traverse = Optional.extract(traverse, binary=True) |
|
273 | 274 | |
|
274 | 275 | if root: |
|
275 | 276 | # verify parent existence; if it's empty return an error
|
276 | 277 | parent = RepoGroup.get_by_group_name(root) |
|
277 | 278 | if not parent: |
|
278 | 279 | raise JSONRPCError( |
|
279 | 280 | 'Root repository group `{}` does not exist'.format(root)) |
|
280 | 281 | |
|
281 | 282 | if traverse: |
|
282 | 283 | repos = RepoModel().get_repos_for_root(root=root, traverse=traverse) |
|
283 | 284 | else: |
|
284 | 285 | repos = RepoModel().get_repos_for_root(root=parent) |
|
285 | 286 | else: |
|
286 | 287 | if traverse: |
|
287 | 288 | repos = RepoModel().get_all() |
|
288 | 289 | else: |
|
289 | 290 | # return just top-level |
|
290 | 291 | repos = RepoModel().get_repos_for_root(root=None) |
|
291 | 292 | |
|
292 | 293 | repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras) |
|
293 | 294 | return [repo.get_api_data(include_secrets=include_secrets) |
|
294 | 295 | for repo in repo_list] |
|
295 | 296 | |
|
296 | 297 | |
|
297 | 298 | @jsonrpc_method() |
|
298 | 299 | def get_repo_changeset(request, apiuser, repoid, revision, |
|
299 | 300 | details=Optional('basic')): |
|
300 | 301 | """ |
|
301 | 302 | Returns information about a changeset. |
|
302 | 303 | |
|
303 | 304 | Additionally parameters define the amount of details returned by |
|
304 | 305 | this function. |
|
305 | 306 | |
|
306 | 307 | This command can only be run using an |authtoken| with admin rights, |
|
307 | 308 | or users with at least read rights to the |repo|. |
|
308 | 309 | |
|
309 | 310 | :param apiuser: This is filled automatically from the |authtoken|. |
|
310 | 311 | :type apiuser: AuthUser |
|
311 | 312 | :param repoid: The repository name or repository id |
|
312 | 313 | :type repoid: str or int |
|
313 | 314 | :param revision: revision for which listing should be done |
|
314 | 315 | :type revision: str |
|
315 | 316 | :param details: details can be 'basic|extended|full'; full gives diff

316 | 317 | info details like the diff itself, the number of changed files, etc.
|
317 | 318 | :type details: Optional(str) |
|
318 | 319 | |
|
319 | 320 | """ |
|
320 | 321 | repo = get_repo_or_error(repoid) |
|
321 | 322 | if not has_superadmin_permission(apiuser): |
|
322 | 323 | _perms = ( |
|
323 | 324 | 'repository.admin', 'repository.write', 'repository.read',) |
|
324 | 325 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
325 | 326 | |
|
326 | 327 | changes_details = Optional.extract(details) |
|
327 | 328 | _changes_details_types = ['basic', 'extended', 'full'] |
|
328 | 329 | if changes_details not in _changes_details_types: |
|
329 | 330 | raise JSONRPCError( |
|
330 | 331 | 'ret_type must be one of %s' % ( |
|
331 | 332 | ','.join(_changes_details_types))) |
|
332 | 333 | |
|
333 | 334 | pre_load = ['author', 'branch', 'date', 'message', 'parents', |
|
334 | 335 | 'status', '_commit', '_file_paths'] |
|
335 | 336 | |
|
336 | 337 | try: |
|
337 | 338 | cs = repo.get_commit(commit_id=revision, pre_load=pre_load) |
|
338 | 339 | except TypeError as e: |
|
339 | 340 | raise JSONRPCError(e.message) |
|
340 | 341 | _cs_json = cs.__json__() |
|
341 | 342 | _cs_json['diff'] = build_commit_data(cs, changes_details) |
|
342 | 343 | if changes_details == 'full': |
|
343 | 344 | _cs_json['refs'] = cs._get_refs() |
|
344 | 345 | return _cs_json |
|
345 | 346 | |
|
346 | 347 | |
|
347 | 348 | @jsonrpc_method() |
|
348 | 349 | def get_repo_changesets(request, apiuser, repoid, start_rev, limit, |
|
349 | 350 | details=Optional('basic')): |
|
350 | 351 | """ |
|
351 | 352 | Returns a set of commits limited by the number starting |
|
352 | 353 | from the `start_rev` option. |
|
353 | 354 | |
|
354 | 355 | Additional parameters define the amount of details returned by this |
|
355 | 356 | function. |
|
356 | 357 | |
|
357 | 358 | This command can only be run using an |authtoken| with admin rights, |
|
358 | 359 | or users with at least read rights to |repos|. |
|
359 | 360 | |
|
360 | 361 | :param apiuser: This is filled automatically from the |authtoken|. |
|
361 | 362 | :type apiuser: AuthUser |
|
362 | 363 | :param repoid: The repository name or repository ID. |
|
363 | 364 | :type repoid: str or int |
|
364 | 365 | :param start_rev: The starting revision from where to get changesets. |
|
365 | 366 | :type start_rev: str |
|
366 | 367 | :param limit: Limit the number of commits to this amount |
|
367 | 368 | :type limit: str or int |
|
368 | 369 | :param details: Set the level of detail returned. Valid options are:
|
369 | 370 | ``basic``, ``extended`` and ``full``. |
|
370 | 371 | :type details: Optional(str) |
|
371 | 372 | |
|
372 | 373 | .. note:: |
|
373 | 374 | |
|
374 | 375 | Setting the parameter `details` to the value ``full`` is extensive |
|
375 | 376 | and returns details like the diff itself, and the number |
|
376 | 377 | of changed files. |
|
377 | 378 | |
|
378 | 379 | """ |
|
379 | 380 | repo = get_repo_or_error(repoid) |
|
380 | 381 | if not has_superadmin_permission(apiuser): |
|
381 | 382 | _perms = ( |
|
382 | 383 | 'repository.admin', 'repository.write', 'repository.read',) |
|
383 | 384 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
384 | 385 | |
|
385 | 386 | changes_details = Optional.extract(details) |
|
386 | 387 | _changes_details_types = ['basic', 'extended', 'full'] |
|
387 | 388 | if changes_details not in _changes_details_types: |
|
388 | 389 | raise JSONRPCError( |
|
389 | 390 | 'ret_type must be one of %s' % ( |
|
390 | 391 | ','.join(_changes_details_types))) |
|
391 | 392 | |
|
392 | 393 | limit = int(limit) |
|
393 | 394 | pre_load = ['author', 'branch', 'date', 'message', 'parents', |
|
394 | 395 | 'status', '_commit', '_file_paths'] |
|
395 | 396 | |
|
396 | 397 | vcs_repo = repo.scm_instance() |
|
397 | 398 | # SVN needs a special case to distinguish its index and commit id |
|
398 | 399 | if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'): |
|
399 | 400 | start_rev = vcs_repo.commit_ids[0] |
|
400 | 401 | |
|
401 | 402 | try: |
|
402 | 403 | commits = vcs_repo.get_commits( |
|
403 | 404 | start_id=start_rev, pre_load=pre_load) |
|
404 | 405 | except TypeError as e: |
|
405 | 406 | raise JSONRPCError(e.message) |
|
406 | 407 | except Exception: |
|
407 | 408 | log.exception('Fetching of commits failed') |
|
408 | 409 | raise JSONRPCError('Error occurred during commit fetching') |
|
409 | 410 | |
|
410 | 411 | ret = [] |
|
411 | 412 | for cnt, commit in enumerate(commits): |
|
412 | 413 | if cnt >= limit != -1: |
|
413 | 414 | break |
|
414 | 415 | _cs_json = commit.__json__() |
|
415 | 416 | _cs_json['diff'] = build_commit_data(commit, changes_details) |
|
416 | 417 | if changes_details == 'full': |
|
417 | 418 | _cs_json['refs'] = { |
|
418 | 419 | 'branches': [commit.branch], |
|
419 | 420 | 'bookmarks': getattr(commit, 'bookmarks', []), |
|
420 | 421 | 'tags': commit.tags |
|
421 | 422 | } |
|
422 | 423 | ret.append(_cs_json) |
|
423 | 424 | return ret |
|
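One subtlety in the loop above: `cnt >= limit != -1` is a Python chained comparison, equivalent to `cnt >= limit and limit != -1`, so passing a limit of -1 disables the cut-off entirely. A quick sketch:

```python
# Sketch: the chained comparison used for the commit limit above.
def should_stop(cnt, limit):
    return cnt >= limit != -1  # same as: cnt >= limit and limit != -1

assert should_stop(5, 5) is True    # limit reached, stop iterating
assert should_stop(5, -1) is False  # limit of -1 means "no limit"
```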
424 | 425 | |
|
425 | 426 | |
|
426 | 427 | @jsonrpc_method() |
|
427 | 428 | def get_repo_nodes(request, apiuser, repoid, revision, root_path, |
|
428 | 429 | ret_type=Optional('all'), details=Optional('basic'), |
|
429 | 430 | max_file_bytes=Optional(None)): |
|
430 | 431 | """ |
|
431 | 432 | Returns a list of nodes and children in a flat list for a given |
|
432 | 433 | path at given revision. |
|
433 | 434 | |
|
434 | 435 | It's possible to specify ret_type to show only `files` or `dirs`. |
|
435 | 436 | |
|
436 | 437 | This command can only be run using an |authtoken| with admin rights, |
|
437 | 438 | or users with at least read rights to |repos|. |
|
438 | 439 | |
|
439 | 440 | :param apiuser: This is filled automatically from the |authtoken|. |
|
440 | 441 | :type apiuser: AuthUser |
|
441 | 442 | :param repoid: The repository name or repository ID. |
|
442 | 443 | :type repoid: str or int |
|
443 | 444 | :param revision: The revision for which listing should be done. |
|
444 | 445 | :type revision: str |
|
445 | 446 | :param root_path: The path from which to start displaying. |
|
446 | 447 | :type root_path: str |
|
447 | 448 | :param ret_type: Set the return type. Valid options are |
|
448 | 449 | ``all`` (default), ``files`` and ``dirs``. |
|
449 | 450 | :type ret_type: Optional(str) |
|
450 | 451 | :param details: Returns extended information about nodes, such as |
|
451 | 452 | md5, binary, and/or content. The valid options are ``basic`` and
|
452 | 453 | ``full``. |
|
453 | 454 | :type details: Optional(str) |
|
454 | 455 | :param max_file_bytes: Only return file content for files under this size in bytes

455 | 456 | :type max_file_bytes: Optional(int)
|
456 | 457 | |
|
457 | 458 | Example output: |
|
458 | 459 | |
|
459 | 460 | .. code-block:: bash |
|
460 | 461 | |
|
461 | 462 | id : <id_given_in_input> |
|
462 | 463 | result: [ |
|
463 | 464 | { |
|
464 | 465 | "name" : "<name>" |
|
465 | 466 | "type" : "<type>", |
|
466 | 467 | "binary": "<true|false>" (only in extended mode) |
|
467 | 468 | "md5" : "<md5 of file content>" (only in extended mode) |
|
468 | 469 | }, |
|
469 | 470 | ... |
|
470 | 471 | ] |
|
471 | 472 | error: null |
|
472 | 473 | """ |
|
473 | 474 | |
|
474 | 475 | repo = get_repo_or_error(repoid) |
|
475 | 476 | if not has_superadmin_permission(apiuser): |
|
476 | 477 | _perms = ( |
|
477 | 478 | 'repository.admin', 'repository.write', 'repository.read',) |
|
478 | 479 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
479 | 480 | |
|
480 | 481 | ret_type = Optional.extract(ret_type) |
|
481 | 482 | details = Optional.extract(details) |
|
482 | 483 | _extended_types = ['basic', 'full'] |
|
483 | 484 | if details not in _extended_types: |
|
484 | 485 | raise JSONRPCError( |
|
485 | 486 | 'ret_type must be one of %s' % (','.join(_extended_types))) |
|
486 | 487 | extended_info = False |
|
487 | 488 | content = False |
|
488 | 489 | if details == 'basic': |
|
489 | 490 | extended_info = True |
|
490 | 491 | |
|
491 | 492 | if details == 'full': |
|
492 | 493 | extended_info = content = True |
|
493 | 494 | |
|
494 | 495 | _map = {} |
|
495 | 496 | try: |
|
496 | 497 | # check if repo is not empty by any chance, skip quicker if it is. |
|
497 | 498 | _scm = repo.scm_instance() |
|
498 | 499 | if _scm.is_empty(): |
|
499 | 500 | return [] |
|
500 | 501 | |
|
501 | 502 | _d, _f = ScmModel().get_nodes( |
|
502 | 503 | repo, revision, root_path, flat=False, |
|
503 | 504 | extended_info=extended_info, content=content, |
|
504 | 505 | max_file_bytes=max_file_bytes) |
|
505 | 506 | _map = { |
|
506 | 507 | 'all': _d + _f, |
|
507 | 508 | 'files': _f, |
|
508 | 509 | 'dirs': _d, |
|
509 | 510 | } |
|
510 | 511 | return _map[ret_type] |
|
511 | 512 | except KeyError: |
|
512 | 513 | raise JSONRPCError( |
|
513 | 514 | 'ret_type must be one of %s' % (','.join(sorted(_map.keys())))) |
|
514 | 515 | except Exception: |
|
515 | 516 | log.exception("Exception occurred while trying to get repo nodes") |
|
516 | 517 | raise JSONRPCError( |
|
517 | 518 | 'failed to get repo: `%s` nodes' % repo.repo_name |
|
518 | 519 | ) |
|
519 | 520 | |
|
520 | 521 | |
|
521 | 522 | @jsonrpc_method() |
|
522 | 523 | def get_repo_refs(request, apiuser, repoid): |
|
523 | 524 | """ |
|
524 | 525 | Returns a dictionary of current references. It returns |
|
525 | 526 | bookmarks, branches, closed_branches, and tags for given repository |
|
526 | 527 | |
|
527 | 528 |
|
528 | 529 | |
|
529 | 530 | This command can only be run using an |authtoken| with admin rights, |
|
530 | 531 | or users with at least read rights to |repos|. |
|
531 | 532 | |
|
532 | 533 | :param apiuser: This is filled automatically from the |authtoken|. |
|
533 | 534 | :type apiuser: AuthUser |
|
534 | 535 | :param repoid: The repository name or repository ID. |
|
535 | 536 | :type repoid: str or int |
|
536 | 537 | |
|
537 | 538 | Example output: |
|
538 | 539 | |
|
539 | 540 | .. code-block:: bash |
|
540 | 541 | |
|
541 | 542 | id : <id_given_in_input> |
|
542 | 543 | "result": { |
|
543 | 544 | "bookmarks": { |
|
544 | 545 | "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188", |
|
545 | 546 | "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf" |
|
546 | 547 | }, |
|
547 | 548 | "branches": { |
|
548 | 549 | "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188", |
|
549 | 550 | "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf" |
|
550 | 551 | }, |
|
551 | 552 | "branches_closed": {}, |
|
552 | 553 | "tags": { |
|
553 | 554 | "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188", |
|
554 | 555 | "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022", |
|
555 | 556 | "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27", |
|
556 | 557 | "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17", |
|
557 | 558 | } |
|
558 | 559 | } |
|
559 | 560 | error: null |
|
560 | 561 | """ |
|
561 | 562 | |
|
562 | 563 | repo = get_repo_or_error(repoid) |
|
563 | 564 | if not has_superadmin_permission(apiuser): |
|
564 | 565 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
565 | 566 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
566 | 567 | |
|
567 | 568 | try: |
|
568 | 569 | # check if repo is not empty by any chance, skip quicker if it is. |
|
569 | 570 | vcs_instance = repo.scm_instance() |
|
570 | 571 | refs = vcs_instance.refs() |
|
571 | 572 | return refs |
|
572 | 573 | except Exception: |
|
573 | 574 | log.exception("Exception occurred while trying to get repo refs") |
|
574 | 575 | raise JSONRPCError( |
|
575 | 576 | 'failed to get repo: `%s` references' % repo.repo_name |
|
576 | 577 | ) |
|
577 | 578 | |
|
578 | 579 | |
|
579 | 580 | @jsonrpc_method() |
|
580 | 581 | def create_repo( |
|
581 | 582 | request, apiuser, repo_name, repo_type, |
|
582 | 583 | owner=Optional(OAttr('apiuser')), |
|
583 | 584 | description=Optional(''), |
|
584 | 585 | private=Optional(False), |
|
585 | 586 | clone_uri=Optional(None), |
|
586 | 587 | landing_rev=Optional('rev:tip'), |
|
587 | 588 | enable_statistics=Optional(False), |
|
588 | 589 | enable_locking=Optional(False), |
|
589 | 590 | enable_downloads=Optional(False), |
|
590 | 591 | copy_permissions=Optional(False)): |
|
591 | 592 | """ |
|
592 | 593 | Creates a repository. |
|
593 | 594 | |
|
594 | 595 | * If the repository name contains "/", repository will be created inside |
|
595 | 596 | a repository group or nested repository groups |
|
596 | 597 | |
|
597 | 598 | For example "foo/bar/repo1" will create |repo| called "repo1" inside |
|
598 | 599 | group "foo/bar". You have to have permissions to access and write to |
|
599 | 600 | the last repository group ("bar" in this example) |
|
600 | 601 | |
|
601 | 602 | This command can only be run using an |authtoken| with at least |
|
602 | 603 | permissions to create repositories, or write permissions to |
|
603 | 604 | parent repository groups. |
|
604 | 605 | |
|
605 | 606 | :param apiuser: This is filled automatically from the |authtoken|. |
|
606 | 607 | :type apiuser: AuthUser |
|
607 | 608 | :param repo_name: Set the repository name. |
|
608 | 609 | :type repo_name: str |
|
609 | 610 | :param repo_type: Set the repository type; 'hg','git', or 'svn'. |
|
610 | 611 | :type repo_type: str |
|
611 | 612 | :param owner: user_id or username |
|
612 | 613 | :type owner: Optional(str) |
|
613 | 614 | :param description: Set the repository description. |
|
614 | 615 | :type description: Optional(str) |
|
615 | 616 | :param private: set repository as private |
|
616 | 617 | :type private: bool |
|
617 | 618 | :param clone_uri: set clone_uri |
|
618 | 619 | :type clone_uri: str |
|
619 | 620 | :param landing_rev: <rev_type>:<rev> |
|
620 | 621 | :type landing_rev: str |
|
621 | 622 | :param enable_locking: |
|
622 | 623 | :type enable_locking: bool |
|
623 | 624 | :param enable_downloads: |
|
624 | 625 | :type enable_downloads: bool |
|
625 | 626 | :param enable_statistics: |
|
626 | 627 | :type enable_statistics: bool |
|
627 | 628 | :param copy_permissions: Copy permission from group in which the |
|
628 | 629 | repository is being created. |
|
629 | 630 | :type copy_permissions: bool |
|
630 | 631 | |
|
631 | 632 | |
|
632 | 633 | Example output: |
|
633 | 634 | |
|
634 | 635 | .. code-block:: bash |
|
635 | 636 | |
|
636 | 637 | id : <id_given_in_input> |
|
637 | 638 | result: { |
|
638 | 639 | "msg": "Created new repository `<reponame>`", |
|
639 | 640 | "success": true, |
|
640 | 641 | "task": "<celery task id or None if done sync>" |
|
641 | 642 | } |
|
642 | 643 | error: null |
|
643 | 644 | |
|
644 | 645 | |
|
645 | 646 | Example error output: |
|
646 | 647 | |
|
647 | 648 | .. code-block:: bash |
|
648 | 649 | |
|
649 | 650 | id : <id_given_in_input> |
|
650 | 651 | result : null |
|
651 | 652 | error : { |
|
652 | 653 | 'failed to create repository `<repo_name>`' |
|
653 | 654 | } |
|
654 | 655 | |
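Example usage (an illustrative sketch; endpoint, token, and names are
placeholders, and the caller is assumed to have write access to the
target repository group):

.. code-block:: python

    import requests

    # creates `repo1` inside the existing repository group `foo/bar`
    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'create_repo',
        'args': {
            'repo_name': 'foo/bar/repo1',
            'repo_type': 'hg',
            'description': 'demo repository',
            'private': True,
        },
    }
    result = requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json()
    # `task` holds a celery task id when creation runs asynchronously
    print(result['result'])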
|
655 | 656 | """ |
|
656 | 657 | |
|
657 | 658 | owner = validate_set_owner_permissions(apiuser, owner) |
|
658 | 659 | |
|
659 | 660 | description = Optional.extract(description) |
|
660 | 661 | copy_permissions = Optional.extract(copy_permissions) |
|
661 | 662 | clone_uri = Optional.extract(clone_uri) |
|
662 | 663 | landing_commit_ref = Optional.extract(landing_rev) |
|
663 | 664 | |
|
664 | 665 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
665 | 666 | if isinstance(private, Optional): |
|
666 | 667 | private = defs.get('repo_private') or Optional.extract(private) |
|
667 | 668 | if isinstance(repo_type, Optional): |
|
668 | 669 | repo_type = defs.get('repo_type') |
|
669 | 670 | if isinstance(enable_statistics, Optional): |
|
670 | 671 | enable_statistics = defs.get('repo_enable_statistics') |
|
671 | 672 | if isinstance(enable_locking, Optional): |
|
672 | 673 | enable_locking = defs.get('repo_enable_locking') |
|
673 | 674 | if isinstance(enable_downloads, Optional): |
|
674 | 675 | enable_downloads = defs.get('repo_enable_downloads') |
|
675 | 676 | |
|
676 | 677 | schema = repo_schema.RepoSchema().bind( |
|
677 | 678 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
678 | 679 | # user caller |
|
679 | 680 | user=apiuser) |
|
680 | 681 | |
|
681 | 682 | try: |
|
682 | 683 | schema_data = schema.deserialize(dict( |
|
683 | 684 | repo_name=repo_name, |
|
684 | 685 | repo_type=repo_type, |
|
685 | 686 | repo_owner=owner.username, |
|
686 | 687 | repo_description=description, |
|
687 | 688 | repo_landing_commit_ref=landing_commit_ref, |
|
688 | 689 | repo_clone_uri=clone_uri, |
|
689 | 690 | repo_private=private, |
|
690 | 691 | repo_copy_permissions=copy_permissions, |
|
691 | 692 | repo_enable_statistics=enable_statistics, |
|
692 | 693 | repo_enable_downloads=enable_downloads, |
|
693 | 694 | repo_enable_locking=enable_locking)) |
|
694 | 695 | except validation_schema.Invalid as err: |
|
695 | 696 | raise JSONRPCValidationError(colander_exc=err) |
|
696 | 697 | |
|
697 | 698 | try: |
|
698 | 699 | data = { |
|
699 | 700 | 'owner': owner, |
|
700 | 701 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], |
|
701 | 702 | 'repo_name_full': schema_data['repo_name'], |
|
702 | 703 | 'repo_group': schema_data['repo_group']['repo_group_id'], |
|
703 | 704 | 'repo_type': schema_data['repo_type'], |
|
704 | 705 | 'repo_description': schema_data['repo_description'], |
|
705 | 706 | 'repo_private': schema_data['repo_private'], |
|
706 | 707 | 'clone_uri': schema_data['repo_clone_uri'], |
|
707 | 708 | 'repo_landing_rev': schema_data['repo_landing_commit_ref'], |
|
708 | 709 | 'enable_statistics': schema_data['repo_enable_statistics'], |
|
709 | 710 | 'enable_locking': schema_data['repo_enable_locking'], |
|
710 | 711 | 'enable_downloads': schema_data['repo_enable_downloads'], |
|
711 | 712 | 'repo_copy_permissions': schema_data['repo_copy_permissions'], |
|
712 | 713 | } |
|
713 | 714 | |
|
714 | 715 | task = RepoModel().create(form_data=data, cur_user=owner) |
|
715 | from celery.result import BaseAsyncResult | |
|
716 | task_id = None | |
|
717 | if isinstance(task, BaseAsyncResult): | |
|
718 | task_id = task.task_id | |
|
716 | task_id = get_task_id(task) | |
|
719 | 717 | # no commit, it's done in RepoModel, or async via celery |
|
720 | 718 | return { |
|
721 | 719 | 'msg': "Created new repository `%s`" % (schema_data['repo_name'],), |
|
722 | 720 | 'success': True, # cannot return the repo data here since |

723 | 721 | # creation can be done async |
|
724 | 722 | 'task': task_id |
|
725 | 723 | } |
|
726 | 724 | except Exception: |
|
727 | 725 | log.exception( |
|
728 | 726 | u"Exception while trying to create the repository %s", |
|
729 | 727 | schema_data['repo_name']) |
|
730 | 728 | raise JSONRPCError( |
|
731 | 729 | 'failed to create repository `%s`' % (schema_data['repo_name'],)) |
|
732 | 730 | |
|
733 | 731 | |
|
734 | 732 | @jsonrpc_method() |
|
735 | 733 | def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''), |
|
736 | 734 | description=Optional('')): |
|
737 | 735 | """ |
|
738 | 736 | Adds an extra field to a repository. |
|
739 | 737 | |
|
740 | 738 | This command can only be run using an |authtoken| with at least |
|
741 | 739 | admin permissions on the |repo|. |
|
742 | 740 | |
|
743 | 741 | :param apiuser: This is filled automatically from the |authtoken|. |
|
744 | 742 | :type apiuser: AuthUser |
|
745 | 743 | :param repoid: Set the repository name or repository id. |
|
746 | 744 | :type repoid: str or int |
|
747 | 745 | :param key: Create a unique field key for this repository. |
|
748 | 746 | :type key: str |
|
749 | 747 | :param label: |
|
750 | 748 | :type label: Optional(str) |
|
751 | 749 | :param description: |
|
752 | 750 | :type description: Optional(str) |
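
Example usage (an illustrative sketch; endpoint, token, and the field
key/label are placeholders):

.. code-block:: python

    import requests

    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'add_field_to_repo',
        'args': {'repoid': 'my-repo', 'key': 'ticket_url',
                 'label': 'Ticket URL'},
    }
    print(requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json())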
|
753 | 751 | """ |
|
754 | 752 | repo = get_repo_or_error(repoid) |
|
755 | 753 | if not has_superadmin_permission(apiuser): |
|
756 | 754 | _perms = ('repository.admin',) |
|
757 | 755 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
758 | 756 | |
|
759 | 757 | label = Optional.extract(label) or key |
|
760 | 758 | description = Optional.extract(description) |
|
761 | 759 | |
|
762 | 760 | field = RepositoryField.get_by_key_name(key, repo) |
|
763 | 761 | if field: |
|
764 | 762 | raise JSONRPCError('Field with key ' |
|
765 | 763 | '`%s` exists for repo `%s`' % (key, repoid)) |
|
766 | 764 | |
|
767 | 765 | try: |
|
768 | 766 | RepoModel().add_repo_field(repo, key, field_label=label, |
|
769 | 767 | field_desc=description) |
|
770 | 768 | Session().commit() |
|
771 | 769 | return { |
|
772 | 770 | 'msg': "Added new repository field `%s`" % (key,), |
|
773 | 771 | 'success': True, |
|
774 | 772 | } |
|
775 | 773 | except Exception: |
|
776 | 774 | log.exception("Exception occurred while trying to add field to repo") |
|
777 | 775 | raise JSONRPCError( |
|
778 | 776 | 'failed to create new field for repository `%s`' % (repoid,)) |
|
779 | 777 | |
|
780 | 778 | |
|
781 | 779 | @jsonrpc_method() |
|
782 | 780 | def remove_field_from_repo(request, apiuser, repoid, key): |
|
783 | 781 | """ |
|
784 | 782 | Removes an extra field from a repository. |
|
785 | 783 | |
|
786 | 784 | This command can only be run using an |authtoken| with at least |
|
787 | 785 | admin permissions on the |repo|. |
|
788 | 786 | |
|
789 | 787 | :param apiuser: This is filled automatically from the |authtoken|. |
|
790 | 788 | :type apiuser: AuthUser |
|
791 | 789 | :param repoid: Set the repository name or repository ID. |
|
792 | 790 | :type repoid: str or int |
|
793 | 791 | :param key: Set the unique field key for this repository. |
|
794 | 792 | :type key: str |
|
795 | 793 | """ |
|
796 | 794 | |
|
797 | 795 | repo = get_repo_or_error(repoid) |
|
798 | 796 | if not has_superadmin_permission(apiuser): |
|
799 | 797 | _perms = ('repository.admin',) |
|
800 | 798 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
801 | 799 | |
|
802 | 800 | field = RepositoryField.get_by_key_name(key, repo) |
|
803 | 801 | if not field: |
|
804 | 802 | raise JSONRPCError('Field with key `%s` does not ' |
|
805 | 803 | 'exist for repo `%s`' % (key, repoid)) |
|
806 | 804 | |
|
807 | 805 | try: |
|
808 | 806 | RepoModel().delete_repo_field(repo, field_key=key) |
|
809 | 807 | Session().commit() |
|
810 | 808 | return { |
|
811 | 809 | 'msg': "Deleted repository field `%s`" % (key,), |
|
812 | 810 | 'success': True, |
|
813 | 811 | } |
|
814 | 812 | except Exception: |
|
815 | 813 | log.exception( |
|
816 | 814 | "Exception occurred while trying to delete field from repo") |
|
817 | 815 | raise JSONRPCError( |
|
818 | 816 | 'failed to delete field for repository `%s`' % (repoid,)) |
|
819 | 817 | |
|
820 | 818 | |
|
821 | 819 | @jsonrpc_method() |
|
822 | 820 | def update_repo( |
|
823 | 821 | request, apiuser, repoid, repo_name=Optional(None), |
|
824 | 822 | owner=Optional(OAttr('apiuser')), description=Optional(''), |
|
825 | 823 | private=Optional(False), clone_uri=Optional(None), |
|
826 | 824 | landing_rev=Optional('rev:tip'), fork_of=Optional(None), |
|
827 | 825 | enable_statistics=Optional(False), |
|
828 | 826 | enable_locking=Optional(False), |
|
829 | 827 | enable_downloads=Optional(False), fields=Optional('')): |
|
830 | 828 | """ |
|
831 | 829 | Updates a repository with the given information. |
|
832 | 830 | |
|
833 | 831 | This command can only be run using an |authtoken| with at least |
|
834 | 832 | admin permissions to the |repo|. |
|
835 | 833 | |
|
836 | 834 | * If the repository name contains "/", repository will be updated |
|
837 | 835 | accordingly with a repository group or nested repository groups |
|
838 | 836 | |
|
839 | 837 | For example, repoid=repo-test repo_name="foo/bar/repo-test" will update |repo| |
|
840 | 838 | called "repo-test" and place it inside group "foo/bar". |
|
841 | 839 | You have to have permissions to access and write to the last repository |
|
842 | 840 | group ("bar" in this example) |
|
843 | 841 | |
|
844 | 842 | :param apiuser: This is filled automatically from the |authtoken|. |
|
845 | 843 | :type apiuser: AuthUser |
|
846 | 844 | :param repoid: repository name or repository ID. |
|
847 | 845 | :type repoid: str or int |
|
848 | 846 | :param repo_name: Update the |repo| name, including the |
|
849 | 847 | repository group it's in. |
|
850 | 848 | :type repo_name: str |
|
851 | 849 | :param owner: Set the |repo| owner. |
|
852 | 850 | :type owner: str |
|
853 | 851 | :param fork_of: Set the |repo| as fork of another |repo|. |
|
854 | 852 | :type fork_of: str |
|
855 | 853 | :param description: Update the |repo| description. |
|
856 | 854 | :type description: str |
|
857 | 855 | :param private: Set the |repo| as private. (True | False) |
|
858 | 856 | :type private: bool |
|
859 | 857 | :param clone_uri: Update the |repo| clone URI. |
|
860 | 858 | :type clone_uri: str |
|
861 | 859 | :param landing_rev: Set the |repo| landing revision. Default is ``rev:tip``. |
|
862 | 860 | :type landing_rev: str |
|
863 | 861 | :param enable_statistics: Enable statistics on the |repo|, (True | False). |
|
864 | 862 | :type enable_statistics: bool |
|
865 | 863 | :param enable_locking: Enable |repo| locking. |
|
866 | 864 | :type enable_locking: bool |
|
867 | 865 | :param enable_downloads: Enable downloads from the |repo|, (True | False). |
|
868 | 866 | :type enable_downloads: bool |
|
869 | 867 | :param fields: Add extra fields to the |repo|. Use the following |
|
870 | 868 | example format: ``field_key=field_val,field_key2=fieldval2``. |
|
871 | 869 | To include a literal comma inside a value, escape it as ``\,`` (see the usage sketch below). |
|
872 | 870 | :type fields: str |
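
Example usage (an illustrative sketch; endpoint, token, and names are
placeholders, and the extra-field keys are assumed to already exist on
the repository):

.. code-block:: python

    import requests

    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'update_repo',
        'args': {
            'repoid': 'repo-test',
            # rename and move under the group foo/bar
            'repo_name': 'foo/bar/repo-test',
            # two extra fields; `\,` keeps a literal comma in a value
            'fields': 'ticket_url=https://bugs.example.com,tags=a\\,b',
        },
    }
    print(requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json())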
|
873 | 871 | """ |
|
874 | 872 | |
|
875 | 873 | repo = get_repo_or_error(repoid) |
|
876 | 874 | |
|
877 | 875 | include_secrets = False |
|
878 | 876 | if not has_superadmin_permission(apiuser): |
|
879 | 877 | validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',)) |
|
880 | 878 | else: |
|
881 | 879 | include_secrets = True |
|
882 | 880 | |
|
883 | 881 | updates = dict( |
|
884 | 882 | repo_name=repo_name |
|
885 | 883 | if not isinstance(repo_name, Optional) else repo.repo_name, |
|
886 | 884 | |
|
887 | 885 | fork_id=fork_of |
|
888 | 886 | if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None, |
|
889 | 887 | |
|
890 | 888 | user=owner |
|
891 | 889 | if not isinstance(owner, Optional) else repo.user.username, |
|
892 | 890 | |
|
893 | 891 | repo_description=description |
|
894 | 892 | if not isinstance(description, Optional) else repo.description, |
|
895 | 893 | |
|
896 | 894 | repo_private=private |
|
897 | 895 | if not isinstance(private, Optional) else repo.private, |
|
898 | 896 | |
|
899 | 897 | clone_uri=clone_uri |
|
900 | 898 | if not isinstance(clone_uri, Optional) else repo.clone_uri, |
|
901 | 899 | |
|
902 | 900 | repo_landing_rev=landing_rev |
|
903 | 901 | if not isinstance(landing_rev, Optional) else repo._landing_revision, |
|
904 | 902 | |
|
905 | 903 | repo_enable_statistics=enable_statistics |
|
906 | 904 | if not isinstance(enable_statistics, Optional) else repo.enable_statistics, |
|
907 | 905 | |
|
908 | 906 | repo_enable_locking=enable_locking |
|
909 | 907 | if not isinstance(enable_locking, Optional) else repo.enable_locking, |
|
910 | 908 | |
|
911 | 909 | repo_enable_downloads=enable_downloads |
|
912 | 910 | if not isinstance(enable_downloads, Optional) else repo.enable_downloads) |
|
913 | 911 | |
|
914 | 912 | ref_choices, _labels = ScmModel().get_repo_landing_revs( |
|
915 | 913 | request.translate, repo=repo) |
|
916 | 914 | |
|
917 | 915 | old_values = repo.get_api_data() |
|
918 | 916 | schema = repo_schema.RepoSchema().bind( |
|
919 | 917 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
920 | 918 | repo_ref_options=ref_choices, |
|
921 | 919 | # user caller |
|
922 | 920 | user=apiuser, |
|
923 | 921 | old_values=old_values) |
|
924 | 922 | try: |
|
925 | 923 | schema_data = schema.deserialize(dict( |
|
926 | 924 | # we save old value, users cannot change type |
|
927 | 925 | repo_type=repo.repo_type, |
|
928 | 926 | |
|
929 | 927 | repo_name=updates['repo_name'], |
|
930 | 928 | repo_owner=updates['user'], |
|
931 | 929 | repo_description=updates['repo_description'], |
|
932 | 930 | repo_clone_uri=updates['clone_uri'], |
|
933 | 931 | repo_fork_of=updates['fork_id'], |
|
934 | 932 | repo_private=updates['repo_private'], |
|
935 | 933 | repo_landing_commit_ref=updates['repo_landing_rev'], |
|
936 | 934 | repo_enable_statistics=updates['repo_enable_statistics'], |
|
937 | 935 | repo_enable_downloads=updates['repo_enable_downloads'], |
|
938 | 936 | repo_enable_locking=updates['repo_enable_locking'])) |
|
939 | 937 | except validation_schema.Invalid as err: |
|
940 | 938 | raise JSONRPCValidationError(colander_exc=err) |
|
941 | 939 | |
|
942 | 940 | # save validated data back into the updates dict |
|
943 | 941 | validated_updates = dict( |
|
944 | 942 | repo_name=schema_data['repo_group']['repo_name_without_group'], |
|
945 | 943 | repo_group=schema_data['repo_group']['repo_group_id'], |
|
946 | 944 | |
|
947 | 945 | user=schema_data['repo_owner'], |
|
948 | 946 | repo_description=schema_data['repo_description'], |
|
949 | 947 | repo_private=schema_data['repo_private'], |
|
950 | 948 | clone_uri=schema_data['repo_clone_uri'], |
|
951 | 949 | repo_landing_rev=schema_data['repo_landing_commit_ref'], |
|
952 | 950 | repo_enable_statistics=schema_data['repo_enable_statistics'], |
|
953 | 951 | repo_enable_locking=schema_data['repo_enable_locking'], |
|
954 | 952 | repo_enable_downloads=schema_data['repo_enable_downloads'], |
|
955 | 953 | ) |
|
956 | 954 | |
|
957 | 955 | if schema_data['repo_fork_of']: |
|
958 | 956 | fork_repo = get_repo_or_error(schema_data['repo_fork_of']) |
|
959 | 957 | validated_updates['fork_id'] = fork_repo.repo_id |
|
960 | 958 | |
|
961 | 959 | # extra fields |
|
962 | 960 | fields = parse_args(Optional.extract(fields), key_prefix='ex_') |
|
963 | 961 | if fields: |
|
964 | 962 | validated_updates.update(fields) |
|
965 | 963 | |
|
966 | 964 | try: |
|
967 | 965 | RepoModel().update(repo, **validated_updates) |
|
968 | 966 | audit_logger.store_api( |
|
969 | 967 | 'repo.edit', action_data={'old_data': old_values}, |
|
970 | 968 | user=apiuser, repo=repo) |
|
971 | 969 | Session().commit() |
|
972 | 970 | return { |
|
973 | 971 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name), |
|
974 | 972 | 'repository': repo.get_api_data(include_secrets=include_secrets) |
|
975 | 973 | } |
|
976 | 974 | except Exception: |
|
977 | 975 | log.exception( |
|
978 | 976 | u"Exception while trying to update the repository %s", |
|
979 | 977 | repoid) |
|
980 | 978 | raise JSONRPCError('failed to update repo `%s`' % repoid) |
|
981 | 979 | |
|
982 | 980 | |
|
983 | 981 | @jsonrpc_method() |
|
984 | 982 | def fork_repo(request, apiuser, repoid, fork_name, |
|
985 | 983 | owner=Optional(OAttr('apiuser')), |
|
986 | 984 | description=Optional(''), |
|
987 | 985 | private=Optional(False), |
|
988 | 986 | clone_uri=Optional(None), |
|
989 | 987 | landing_rev=Optional('rev:tip'), |
|
990 | 988 | copy_permissions=Optional(False)): |
|
991 | 989 | """ |
|
992 | 990 | Creates a fork of the specified |repo|. |
|
993 | 991 | |
|
994 | 992 | * If the fork_name contains "/", fork will be created inside |
|
995 | 993 | a repository group or nested repository groups |
|
996 | 994 | |
|
997 | 995 | For example "foo/bar/fork-repo" will create fork called "fork-repo" |
|
998 | 996 | inside group "foo/bar". You have to have permissions to access and |
|
999 | 997 | write to the last repository group ("bar" in this example) |
|
1000 | 998 | |
|
1001 | 999 | This command can only be run using an |authtoken| with at least |

1002 | 1000 | read permissions on the |repo| being forked, plus fork creation permission for the user. |
|
1003 | 1001 | |
|
1004 | 1002 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1005 | 1003 | :type apiuser: AuthUser |
|
1006 | 1004 | :param repoid: Set repository name or repository ID. |
|
1007 | 1005 | :type repoid: str or int |
|
1008 | 1006 | :param fork_name: Set the fork name, including its repository group membership. |
|
1009 | 1007 | :type fork_name: str |
|
1010 | 1008 | :param owner: Set the fork owner. |
|
1011 | 1009 | :type owner: str |
|
1012 | 1010 | :param description: Set the fork description. |
|
1013 | 1011 | :type description: str |
|
1014 | 1012 | :param copy_permissions: Copy permissions from parent |repo|. The |
|
1015 | 1013 | default is False. |
|
1016 | 1014 | :type copy_permissions: bool |
|
1017 | 1015 | :param private: Make the fork private. The default is False. |
|
1018 | 1016 | :type private: bool |
|
1019 | 1017 | :param landing_rev: Set the landing revision. The default is ``rev:tip``. |
|
1020 | 1018 | |
|
1021 | 1019 | Example input: |
|
1022 | 1020 | |
|
1023 | 1021 | .. code-block:: bash |
|
1024 | 1022 | |
|
1025 | 1023 | id : <id_for_response> |
|
1026 | 1024 | api_key : "<api_key>" |
|
1027 | 1025 | args: { |
|
1028 | 1026 | "repoid" : "<reponame or repo_id>", |
|
1029 | 1027 | "fork_name": "<forkname>", |
|
1030 | 1028 | "owner": "<username or user_id = Optional(=apiuser)>", |
|
1031 | 1029 | "description": "<description>", |
|
1032 | 1030 | "copy_permissions": "<bool>", |
|
1033 | 1031 | "private": "<bool>", |
|
1034 | 1032 | "landing_rev": "<landing_rev>" |
|
1035 | 1033 | } |
|
1036 | 1034 | |
|
1037 | 1035 | Example output: |
|
1038 | 1036 | |
|
1039 | 1037 | .. code-block:: bash |
|
1040 | 1038 | |
|
1041 | 1039 | id : <id_given_in_input> |
|
1042 | 1040 | result: { |
|
1043 | 1041 | "msg": "Created fork of `<reponame>` as `<forkname>`", |
|
1044 | 1042 | "success": true, |
|
1045 | 1043 | "task": "<celery task id or None if done sync>" |
|
1046 | 1044 | } |
|
1047 | 1045 | error: null |
|
1048 | 1046 | |
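Example usage (an illustrative sketch; endpoint, token, and names are
placeholders):

.. code-block:: python

    import requests

    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'fork_repo',
        'args': {
            'repoid': 'my-repo',
            'fork_name': 'foo/bar/my-repo-fork',
            'copy_permissions': False,
            'private': True,
        },
    }
    result = requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json()
    # forking may run async; `task` then holds the celery task id
    print(result['result']['task'])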
|
1049 | 1047 | """ |
|
1050 | 1048 | |
|
1051 | 1049 | repo = get_repo_or_error(repoid) |
|
1052 | 1050 | repo_name = repo.repo_name |
|
1053 | 1051 | |
|
1054 | 1052 | if not has_superadmin_permission(apiuser): |
|
1055 | 1053 | # check if we have at least read permission for |
|
1056 | 1054 | # this repo that we fork ! |
|
1057 | 1055 | _perms = ( |
|
1058 | 1056 | 'repository.admin', 'repository.write', 'repository.read') |
|
1059 | 1057 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1060 | 1058 | |
|
1061 | 1059 | # check if the regular user has at least fork permissions as well |
|
1062 | 1060 | if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser): |
|
1063 | 1061 | raise JSONRPCForbidden() |
|
1064 | 1062 | |
|
1065 | 1063 | # check if user can set owner parameter |
|
1066 | 1064 | owner = validate_set_owner_permissions(apiuser, owner) |
|
1067 | 1065 | |
|
1068 | 1066 | description = Optional.extract(description) |
|
1069 | 1067 | copy_permissions = Optional.extract(copy_permissions) |
|
1070 | 1068 | clone_uri = Optional.extract(clone_uri) |
|
1071 | 1069 | landing_commit_ref = Optional.extract(landing_rev) |
|
1072 | 1070 | private = Optional.extract(private) |
|
1073 | 1071 | |
|
1074 | 1072 | schema = repo_schema.RepoSchema().bind( |
|
1075 | 1073 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
1076 | 1074 | # user caller |
|
1077 | 1075 | user=apiuser) |
|
1078 | 1076 | |
|
1079 | 1077 | try: |
|
1080 | 1078 | schema_data = schema.deserialize(dict( |
|
1081 | 1079 | repo_name=fork_name, |
|
1082 | 1080 | repo_type=repo.repo_type, |
|
1083 | 1081 | repo_owner=owner.username, |
|
1084 | 1082 | repo_description=description, |
|
1085 | 1083 | repo_landing_commit_ref=landing_commit_ref, |
|
1086 | 1084 | repo_clone_uri=clone_uri, |
|
1087 | 1085 | repo_private=private, |
|
1088 | 1086 | repo_copy_permissions=copy_permissions)) |
|
1089 | 1087 | except validation_schema.Invalid as err: |
|
1090 | 1088 | raise JSONRPCValidationError(colander_exc=err) |
|
1091 | 1089 | |
|
1092 | 1090 | try: |
|
1093 | 1091 | data = { |
|
1094 | 1092 | 'fork_parent_id': repo.repo_id, |
|
1095 | 1093 | |
|
1096 | 1094 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], |
|
1097 | 1095 | 'repo_name_full': schema_data['repo_name'], |
|
1098 | 1096 | 'repo_group': schema_data['repo_group']['repo_group_id'], |
|
1099 | 1097 | 'repo_type': schema_data['repo_type'], |
|
1100 | 1098 | 'description': schema_data['repo_description'], |
|
1101 | 1099 | 'private': schema_data['repo_private'], |
|
1102 | 1100 | 'copy_permissions': schema_data['repo_copy_permissions'], |
|
1103 | 1101 | 'landing_rev': schema_data['repo_landing_commit_ref'], |
|
1104 | 1102 | } |
|
1105 | 1103 | |
|
1106 | 1104 | task = RepoModel().create_fork(data, cur_user=owner) |
|
1107 | 1105 | # no commit, it's done in RepoModel, or async via celery |
|
1108 | from celery.result import BaseAsyncResult | |
|
1109 | task_id = None | |
|
1110 | if isinstance(task, BaseAsyncResult): | |
|
1111 | task_id = task.task_id | |
|
1106 | task_id = get_task_id(task) | |
|
1107 | ||
|
1112 | 1108 | return { |
|
1113 | 1109 | 'msg': 'Created fork of `%s` as `%s`' % ( |
|
1114 | 1110 | repo.repo_name, schema_data['repo_name']), |
|
1115 | 1111 | 'success': True, # cannot return the repo data here since fork |
|
1116 | 1112 | # can be done async |
|
1117 | 1113 | 'task': task_id |
|
1118 | 1114 | } |
|
1119 | 1115 | except Exception: |
|
1120 | 1116 | log.exception( |
|
1121 | 1117 | u"Exception while trying to create fork %s", |
|
1122 | 1118 | schema_data['repo_name']) |
|
1123 | 1119 | raise JSONRPCError( |
|
1124 | 1120 | 'failed to fork repository `%s` as `%s`' % ( |
|
1125 | 1121 | repo_name, schema_data['repo_name'])) |
|
1126 | 1122 | |
|
1127 | 1123 | |
|
1128 | 1124 | @jsonrpc_method() |
|
1129 | 1125 | def delete_repo(request, apiuser, repoid, forks=Optional('')): |
|
1130 | 1126 | """ |
|
1131 | 1127 | Deletes a repository. |
|
1132 | 1128 | |
|
1133 | 1129 | * When the `forks` parameter is set, it's possible to detach or delete |

1134 | 1130 | the forks of the deleted repository. |
|
1135 | 1131 | |
|
1136 | 1132 | This command can only be run using an |authtoken| with admin |
|
1137 | 1133 | permissions on the |repo|. |
|
1138 | 1134 | |
|
1139 | 1135 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1140 | 1136 | :type apiuser: AuthUser |
|
1141 | 1137 | :param repoid: Set the repository name or repository ID. |
|
1142 | 1138 | :type repoid: str or int |
|
1143 | 1139 | :param forks: Set to `detach` or `delete` forks from the |repo|. |
|
1144 | 1140 | :type forks: Optional(str) |
|
1145 | 1141 | |
|
1146 | 1142 | Example output: |
|
1147 | 1143 | |
|
1148 | 1144 | .. code-block:: bash |
|
1149 | 1145 | |
|
1150 | 1146 | id : <id_given_in_input> |
|
1151 | 1147 | result: { |
|
1152 | 1148 | "msg": "Deleted repository `<reponame>`", |
|
1153 | 1149 | "success": true |
|
1154 | 1150 | } |
|
1155 | 1151 | error: null |
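
Example usage (an illustrative sketch; endpoint, token, and the repo
name are placeholders; ``forks='detach'`` detaches any existing forks
instead of failing):

.. code-block:: python

    import requests

    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'delete_repo',
        'args': {'repoid': 'my-repo', 'forks': 'detach'},
    }
    print(requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json())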
|
1156 | 1152 | """ |
|
1157 | 1153 | |
|
1158 | 1154 | repo = get_repo_or_error(repoid) |
|
1159 | 1155 | repo_name = repo.repo_name |
|
1160 | 1156 | if not has_superadmin_permission(apiuser): |
|
1161 | 1157 | _perms = ('repository.admin',) |
|
1162 | 1158 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1163 | 1159 | |
|
1164 | 1160 | try: |
|
1165 | 1161 | handle_forks = Optional.extract(forks) |
|
1166 | 1162 | _forks_msg = '' |
|
1167 | 1163 | _forks = [f for f in repo.forks] |
|
1168 | 1164 | if handle_forks == 'detach': |
|
1169 | 1165 | _forks_msg = ' ' + 'Detached %s forks' % len(_forks) |
|
1170 | 1166 | elif handle_forks == 'delete': |
|
1171 | 1167 | _forks_msg = ' ' + 'Deleted %s forks' % len(_forks) |
|
1172 | 1168 | elif _forks: |
|
1173 | 1169 | raise JSONRPCError( |
|
1174 | 1170 | 'Cannot delete `%s`, it still contains attached forks' % |
|
1175 | 1171 | (repo.repo_name,) |
|
1176 | 1172 | ) |
|
1177 | 1173 | old_data = repo.get_api_data() |
|
1178 | 1174 | RepoModel().delete(repo, forks=handle_forks) |
|
1179 | 1175 | |
|
1180 | 1176 | repo = audit_logger.RepoWrap(repo_id=None, |
|
1181 | 1177 | repo_name=repo.repo_name) |
|
1182 | 1178 | |
|
1183 | 1179 | audit_logger.store_api( |
|
1184 | 1180 | 'repo.delete', action_data={'old_data': old_data}, |
|
1185 | 1181 | user=apiuser, repo=repo) |
|
1186 | 1182 | |
|
1187 | 1183 | ScmModel().mark_for_invalidation(repo_name, delete=True) |
|
1188 | 1184 | Session().commit() |
|
1189 | 1185 | return { |
|
1190 | 1186 | 'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg), |
|
1191 | 1187 | 'success': True |
|
1192 | 1188 | } |
|
1193 | 1189 | except Exception: |
|
1194 | 1190 | log.exception("Exception occurred while trying to delete repo") |
|
1195 | 1191 | raise JSONRPCError( |
|
1196 | 1192 | 'failed to delete repository `%s`' % (repo_name,) |
|
1197 | 1193 | ) |
|
1198 | 1194 | |
|
1199 | 1195 | |
|
1200 | 1196 | # TODO: marcink, change name? |
|
1201 | 1197 | @jsonrpc_method() |
|
1202 | 1198 | def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)): |
|
1203 | 1199 | """ |
|
1204 | 1200 | Invalidates the cache for the specified repository. |
|
1205 | 1201 | |
|
1206 | 1202 | This command can only be run using an |authtoken| with admin or write rights to |
|
1207 | 1203 | the specified repository. |
|
1208 | 1204 | |
|
1209 | 1205 | This command takes the following options: |
|
1210 | 1206 | |
|
1211 | 1207 | :param apiuser: This is filled automatically from |authtoken|. |
|
1212 | 1208 | :type apiuser: AuthUser |
|
1213 | 1209 | :param repoid: Sets the repository name or repository ID. |
|
1214 | 1210 | :type repoid: str or int |
|
1215 | 1211 | :param delete_keys: This deletes the invalidated keys instead of |
|
1216 | 1212 | just flagging them. |
|
1217 | 1213 | :type delete_keys: Optional(``True`` | ``False``) |
|
1218 | 1214 | |
|
1219 | 1215 | Example output: |
|
1220 | 1216 | |
|
1221 | 1217 | .. code-block:: bash |
|
1222 | 1218 | |
|
1223 | 1219 | id : <id_given_in_input> |
|
1224 | 1220 | result : { |
|
1225 | 1221 | 'msg': 'Cache for repository `<repository name>` was invalidated', |

1226 | 1222 | 'repository': '<repository name>' |
|
1227 | 1223 | } |
|
1228 | 1224 | error : null |
|
1229 | 1225 | |
|
1230 | 1226 | Example error output: |
|
1231 | 1227 | |
|
1232 | 1228 | .. code-block:: bash |
|
1233 | 1229 | |
|
1234 | 1230 | id : <id_given_in_input> |
|
1235 | 1231 | result : null |
|
1236 | 1232 | error : { |
|
1237 | 1233 | 'Error occurred during cache invalidation action' |
|
1238 | 1234 | } |
|
1239 | 1235 | |
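Example usage (an illustrative sketch; endpoint, token, and the repo
name are placeholders; ``delete_keys=True`` removes the invalidated
keys instead of just flagging them):

.. code-block:: python

    import requests

    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'invalidate_cache',
        'args': {'repoid': 'my-repo', 'delete_keys': True},
    }
    print(requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json())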
|
1240 | 1236 | """ |
|
1241 | 1237 | |
|
1242 | 1238 | repo = get_repo_or_error(repoid) |
|
1243 | 1239 | if not has_superadmin_permission(apiuser): |
|
1244 | 1240 | _perms = ('repository.admin', 'repository.write',) |
|
1245 | 1241 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1246 | 1242 | |
|
1247 | 1243 | delete = Optional.extract(delete_keys) |
|
1248 | 1244 | try: |
|
1249 | 1245 | ScmModel().mark_for_invalidation(repo.repo_name, delete=delete) |
|
1250 | 1246 | return { |
|
1251 | 1247 | 'msg': 'Cache for repository `%s` was invalidated' % (repoid,), |
|
1252 | 1248 | 'repository': repo.repo_name |
|
1253 | 1249 | } |
|
1254 | 1250 | except Exception: |
|
1255 | 1251 | log.exception( |
|
1256 | 1252 | "Exception occurred while trying to invalidate repo cache") |
|
1257 | 1253 | raise JSONRPCError( |
|
1258 | 1254 | 'Error occurred during cache invalidation action' |
|
1259 | 1255 | ) |
|
1260 | 1256 | |
|
1261 | 1257 | |
|
1262 | 1258 | # TODO: marcink, change name? |
|
1263 | 1259 | @jsonrpc_method() |
|
1264 | 1260 | def lock(request, apiuser, repoid, locked=Optional(None), |
|
1265 | 1261 | userid=Optional(OAttr('apiuser'))): |
|
1266 | 1262 | """ |
|
1267 | 1263 | Sets the lock state of the specified |repo| by the given user. |
|
1268 | 1264 | For more information, see :ref:`repo-locking`. |
|
1269 | 1265 | |
|
1270 | 1266 | * If the ``userid`` option is not set, the repository is locked to the |
|
1271 | 1267 | user who called the method. |
|
1272 | 1268 | * If the ``locked`` parameter is not set, the current lock state of the |
|
1273 | 1269 | repository is displayed. |
|
1274 | 1270 | |
|
1275 | 1271 | This command can only be run using an |authtoken| with admin or write rights to |
|
1276 | 1272 | the specified repository. |
|
1277 | 1273 | |
|
1278 | 1274 | This command takes the following options: |
|
1279 | 1275 | |
|
1280 | 1276 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1281 | 1277 | :type apiuser: AuthUser |
|
1282 | 1278 | :param repoid: Sets the repository name or repository ID. |
|
1283 | 1279 | :type repoid: str or int |
|
1284 | 1280 | :param locked: Sets the lock state. |
|
1285 | 1281 | :type locked: Optional(``True`` | ``False``) |
|
1286 | 1282 | :param userid: Set the repository lock to this user. |
|
1287 | 1283 | :type userid: Optional(str or int) |
|
1288 | 1284 | |
|
1289 | 1285 | Example output: |
|
1290 | 1286 | |
|
1291 | 1287 | .. code-block:: bash |
|
1292 | 1288 | |
|
1293 | 1289 | id : <id_given_in_input> |
|
1294 | 1290 | result : { |
|
1295 | 1291 | 'repo': '<reponame>', |
|
1296 | 1292 | 'locked': <bool: lock state>, |
|
1297 | 1293 | 'locked_since': <int: lock timestamp>, |
|
1298 | 1294 | 'locked_by': <username of person who made the lock>, |
|
1299 | 1295 | 'lock_reason': <str: reason for locking>, |
|
1300 | 1296 | 'lock_state_changed': <bool: True if lock state has been changed in this request>, |
|
1301 | 1297 | 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.' |
|
1302 | 1298 | or |
|
1303 | 1299 | 'msg': 'Repo `<repository name>` not locked.' |
|
1304 | 1300 | or |
|
1305 | 1301 | 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`' |
|
1306 | 1302 | } |
|
1307 | 1303 | error : null |
|
1308 | 1304 | |
|
1309 | 1305 | Example error output: |
|
1310 | 1306 | |
|
1311 | 1307 | .. code-block:: bash |
|
1312 | 1308 | |
|
1313 | 1309 | id : <id_given_in_input> |
|
1314 | 1310 | result : null |
|
1315 | 1311 | error : { |
|
1316 | 1312 | 'Error occurred locking repository `<reponame>`' |
|
1317 | 1313 | } |
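
Example usage (an illustrative sketch; endpoint, token, and the repo
name are placeholders; omitting ``locked`` queries the current state,
passing it sets the state):

.. code-block:: python

    import requests

    API = 'https://rhodecode.example.com/_admin/api'

    def lock_call(**args):
        payload = {'id': 1, 'auth_token': '<auth_token>',
                   'method': 'lock', 'args': args}
        return requests.post(API, json=payload).json()['result']

    print(lock_call(repoid='my-repo'))               # query lock state
    print(lock_call(repoid='my-repo', locked=True))  # acquire the lock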
|
1318 | 1314 | """ |
|
1319 | 1315 | |
|
1320 | 1316 | repo = get_repo_or_error(repoid) |
|
1321 | 1317 | if not has_superadmin_permission(apiuser): |
|
1322 | 1318 | # check if we have at least write permission for this repo ! |
|
1323 | 1319 | _perms = ('repository.admin', 'repository.write',) |
|
1324 | 1320 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1325 | 1321 | |
|
1326 | 1322 | # make sure a normal user does not pass someone else's userid; |

1327 | 1323 | # they are not allowed to do that |
|
1328 | 1324 | if not isinstance(userid, Optional) and userid != apiuser.user_id: |
|
1329 | 1325 | raise JSONRPCError('userid is not the same as your user') |
|
1330 | 1326 | |
|
1331 | 1327 | if isinstance(userid, Optional): |
|
1332 | 1328 | userid = apiuser.user_id |
|
1333 | 1329 | |
|
1334 | 1330 | user = get_user_or_error(userid) |
|
1335 | 1331 | |
|
1336 | 1332 | if isinstance(locked, Optional): |
|
1337 | 1333 | lockobj = repo.locked |
|
1338 | 1334 | |
|
1339 | 1335 | if lockobj[0] is None: |
|
1340 | 1336 | _d = { |
|
1341 | 1337 | 'repo': repo.repo_name, |
|
1342 | 1338 | 'locked': False, |
|
1343 | 1339 | 'locked_since': None, |
|
1344 | 1340 | 'locked_by': None, |
|
1345 | 1341 | 'lock_reason': None, |
|
1346 | 1342 | 'lock_state_changed': False, |
|
1347 | 1343 | 'msg': 'Repo `%s` not locked.' % repo.repo_name |
|
1348 | 1344 | } |
|
1349 | 1345 | return _d |
|
1350 | 1346 | else: |
|
1351 | 1347 | _user_id, _time, _reason = lockobj |
|
1352 | 1348 | lock_user = get_user_or_error(_user_id)  # the user holding the lock |
|
1353 | 1349 | _d = { |
|
1354 | 1350 | 'repo': repo.repo_name, |
|
1355 | 1351 | 'locked': True, |
|
1356 | 1352 | 'locked_since': _time, |
|
1357 | 1353 | 'locked_by': lock_user.username, |
|
1358 | 1354 | 'lock_reason': _reason, |
|
1359 | 1355 | 'lock_state_changed': False, |
|
1360 | 1356 | 'msg': ('Repo `%s` locked by `%s` on `%s`.' |
|
1361 | 1357 | % (repo.repo_name, lock_user.username, |
|
1362 | 1358 | json.dumps(time_to_datetime(_time)))) |
|
1363 | 1359 | } |
|
1364 | 1360 | return _d |
|
1365 | 1361 | |
|
1366 | 1362 | # force locked state through a flag |
|
1367 | 1363 | else: |
|
1368 | 1364 | locked = str2bool(locked) |
|
1369 | 1365 | lock_reason = Repository.LOCK_API |
|
1370 | 1366 | try: |
|
1371 | 1367 | if locked: |
|
1372 | 1368 | lock_time = time.time() |
|
1373 | 1369 | Repository.lock(repo, user.user_id, lock_time, lock_reason) |
|
1374 | 1370 | else: |
|
1375 | 1371 | lock_time = None |
|
1376 | 1372 | Repository.unlock(repo) |
|
1377 | 1373 | _d = { |
|
1378 | 1374 | 'repo': repo.repo_name, |
|
1379 | 1375 | 'locked': locked, |
|
1380 | 1376 | 'locked_since': lock_time, |
|
1381 | 1377 | 'locked_by': user.username, |
|
1382 | 1378 | 'lock_reason': lock_reason, |
|
1383 | 1379 | 'lock_state_changed': True, |
|
1384 | 1380 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' |
|
1385 | 1381 | % (user.username, repo.repo_name, locked)) |
|
1386 | 1382 | } |
|
1387 | 1383 | return _d |
|
1388 | 1384 | except Exception: |
|
1389 | 1385 | log.exception( |
|
1390 | 1386 | "Exception occurred while trying to lock repository") |
|
1391 | 1387 | raise JSONRPCError( |
|
1392 | 1388 | 'Error occurred locking repository `%s`' % repo.repo_name |
|
1393 | 1389 | ) |
|
1394 | 1390 | |
|
1395 | 1391 | |
|
1396 | 1392 | @jsonrpc_method() |
|
1397 | 1393 | def comment_commit( |
|
1398 | 1394 | request, apiuser, repoid, commit_id, message, status=Optional(None), |
|
1399 | 1395 | comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE), |
|
1400 | 1396 | resolves_comment_id=Optional(None), |
|
1401 | 1397 | userid=Optional(OAttr('apiuser'))): |
|
1402 | 1398 | """ |
|
1403 | 1399 | Set a commit comment, and optionally change the status of the commit. |
|
1404 | 1400 | |
|
1405 | 1401 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1406 | 1402 | :type apiuser: AuthUser |
|
1407 | 1403 | :param repoid: Set the repository name or repository ID. |
|
1408 | 1404 | :type repoid: str or int |
|
1409 | 1405 | :param commit_id: Specify the commit_id for which to set a comment. |
|
1410 | 1406 | :type commit_id: str |
|
1411 | 1407 | :param message: The comment text. |
|
1412 | 1408 | :type message: str |
|
1413 | 1409 | :param status: (**Optional**) status of commit, one of: 'not_reviewed', |
|
1414 | 1410 | 'approved', 'rejected', 'under_review' |
|
1415 | 1411 | :type status: str |
|
1416 | 1412 | :param comment_type: Comment type, one of: 'note', 'todo' |
|
1417 | 1413 | :type comment_type: Optional(str), default: 'note' |
|
1418 | 1414 | :param userid: Set the user name of the comment creator. |
|
1419 | 1415 | :type userid: Optional(str or int) |
|
1420 | 1416 | |
|
1421 | 1417 | Example output: |
|
1422 | 1418 | |
|
1423 | 1419 | .. code-block:: bash |
|
1424 | 1420 | |
|
1425 | 1421 | { |
|
1426 | 1422 | "id" : <id_given_in_input>, |
|
1427 | 1423 | "result" : { |
|
1428 | 1424 | "msg": "Commented on commit `<commit_id>` for repository `<repoid>`", |
|
1429 | 1425 | "status_change": null or <status>, |
|
1430 | 1426 | "success": true |
|
1431 | 1427 | }, |
|
1432 | 1428 | "error" : null |
|
1433 | 1429 | } |
|
1434 | 1430 | |
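Example usage (an illustrative sketch; endpoint, token, repo name, and
the commit id are placeholders; ``status='approved'`` also flips the
commit status):

.. code-block:: python

    import requests

    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'comment_commit',
        'args': {
            'repoid': 'my-repo',
            'commit_id': '5611d30200f4040ba2ab4f3d64e5b06408a02188',
            'message': 'Looks good to me.',
            'status': 'approved',
        },
    }
    print(requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json())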
|
1435 | 1431 | """ |
|
1436 | 1432 | repo = get_repo_or_error(repoid) |
|
1437 | 1433 | if not has_superadmin_permission(apiuser): |
|
1438 | 1434 | _perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1439 | 1435 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1440 | 1436 | |
|
1441 | 1437 | try: |
|
1442 | 1438 | commit_id = repo.scm_instance().get_commit(commit_id=commit_id).raw_id |
|
1443 | 1439 | except Exception as e: |
|
1444 | 1440 | log.exception('Failed to fetch commit') |
|
1445 | 1441 | raise JSONRPCError(e.message) |
|
1446 | 1442 | |
|
1447 | 1443 | if isinstance(userid, Optional): |
|
1448 | 1444 | userid = apiuser.user_id |
|
1449 | 1445 | |
|
1450 | 1446 | user = get_user_or_error(userid) |
|
1451 | 1447 | status = Optional.extract(status) |
|
1452 | 1448 | comment_type = Optional.extract(comment_type) |
|
1453 | 1449 | resolves_comment_id = Optional.extract(resolves_comment_id) |
|
1454 | 1450 | |
|
1455 | 1451 | allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES] |
|
1456 | 1452 | if status and status not in allowed_statuses: |
|
1457 | 1453 | raise JSONRPCError('Bad status, must be one ' |

1458 | 1454 | 'of %s, got %s' % (allowed_statuses, status,)) |
|
1459 | 1455 | |
|
1460 | 1456 | if resolves_comment_id: |
|
1461 | 1457 | comment = ChangesetComment.get(resolves_comment_id) |
|
1462 | 1458 | if not comment: |
|
1463 | 1459 | raise JSONRPCError( |
|
1464 | 1460 | 'Invalid resolves_comment_id `%s` for this commit.' |
|
1465 | 1461 | % resolves_comment_id) |
|
1466 | 1462 | if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO: |
|
1467 | 1463 | raise JSONRPCError( |
|
1468 | 1464 | 'Comment `%s` is wrong type for setting status to resolved.' |
|
1469 | 1465 | % resolves_comment_id) |
|
1470 | 1466 | |
|
1471 | 1467 | try: |
|
1472 | 1468 | rc_config = SettingsModel().get_all_settings() |
|
1473 | 1469 | renderer = rc_config.get('rhodecode_markup_renderer', 'rst') |
|
1474 | 1470 | status_change_label = ChangesetStatus.get_status_lbl(status) |
|
1475 | 1471 | comment = CommentsModel().create( |
|
1476 | 1472 | message, repo, user, commit_id=commit_id, |
|
1477 | 1473 | status_change=status_change_label, |
|
1478 | 1474 | status_change_type=status, |
|
1479 | 1475 | renderer=renderer, |
|
1480 | 1476 | comment_type=comment_type, |
|
1481 | 1477 | resolves_comment_id=resolves_comment_id |
|
1482 | 1478 | ) |
|
1483 | 1479 | if status: |
|
1484 | 1480 | # also do a status change |
|
1485 | 1481 | try: |
|
1486 | 1482 | ChangesetStatusModel().set_status( |
|
1487 | 1483 | repo, status, user, comment, revision=commit_id, |
|
1488 | 1484 | dont_allow_on_closed_pull_request=True |
|
1489 | 1485 | ) |
|
1490 | 1486 | except StatusChangeOnClosedPullRequestError: |
|
1491 | 1487 | log.exception( |
|
1492 | 1488 | "Exception occurred while trying to change repo commit status") |
|
1493 | 1489 | msg = ('Changing status on a changeset associated with ' |
|
1494 | 1490 | 'a closed pull request is not allowed') |
|
1495 | 1491 | raise JSONRPCError(msg) |
|
1496 | 1492 | |
|
1497 | 1493 | Session().commit() |
|
1498 | 1494 | return { |
|
1499 | 1495 | 'msg': ( |
|
1500 | 1496 | 'Commented on commit `%s` for repository `%s`' % ( |
|
1501 | 1497 | comment.revision, repo.repo_name)), |
|
1502 | 1498 | 'status_change': status, |
|
1503 | 1499 | 'success': True, |
|
1504 | 1500 | } |
|
1505 | 1501 | except JSONRPCError: |
|
1506 | 1502 | # catch any inside errors, and re-raise them to prevent from |
|
1507 | 1503 | # below global catch to silence them |
|
1508 | 1504 | raise |
|
1509 | 1505 | except Exception: |
|
1510 | 1506 | log.exception("Exception occurred while trying to comment on commit") |
|
1511 | 1507 | raise JSONRPCError( |
|
1512 | 1508 | 'failed to set comment on repository `%s`' % (repo.repo_name,) |
|
1513 | 1509 | ) |
|
1514 | 1510 | |
|
1515 | 1511 | |
|
1516 | 1512 | @jsonrpc_method() |
|
1517 | 1513 | def grant_user_permission(request, apiuser, repoid, userid, perm): |
|
1518 | 1514 | """ |
|
1519 | 1515 | Grant permissions for the specified user on the given repository, |
|
1520 | 1516 | or update existing permissions if found. |
|
1521 | 1517 | |
|
1522 | 1518 | This command can only be run using an |authtoken| with admin |
|
1523 | 1519 | permissions on the |repo|. |
|
1524 | 1520 | |
|
1525 | 1521 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1526 | 1522 | :type apiuser: AuthUser |
|
1527 | 1523 | :param repoid: Set the repository name or repository ID. |
|
1528 | 1524 | :type repoid: str or int |
|
1529 | 1525 | :param userid: Set the user name. |
|
1530 | 1526 | :type userid: str |
|
1531 | 1527 | :param perm: Set the user permissions, using the following format |
|
1532 | 1528 | ``(repository.(none|read|write|admin))`` |
|
1533 | 1529 | :type perm: str |
|
1534 | 1530 | |
|
1535 | 1531 | Example output: |
|
1536 | 1532 | |
|
1537 | 1533 | .. code-block:: bash |
|
1538 | 1534 | |
|
1539 | 1535 | id : <id_given_in_input> |
|
1540 | 1536 | result: { |
|
1541 | 1537 | "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`", |
|
1542 | 1538 | "success": true |
|
1543 | 1539 | } |
|
1544 | 1540 | error: null |
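
Example usage (an illustrative sketch; endpoint, token, user, and repo
names are placeholders):

.. code-block:: python

    import requests

    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'grant_user_permission',
        'args': {'repoid': 'my-repo', 'userid': 'jdoe',
                 'perm': 'repository.write'},
    }
    print(requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json())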
|
1545 | 1541 | """ |
|
1546 | 1542 | |
|
1547 | 1543 | repo = get_repo_or_error(repoid) |
|
1548 | 1544 | user = get_user_or_error(userid) |
|
1549 | 1545 | perm = get_perm_or_error(perm) |
|
1550 | 1546 | if not has_superadmin_permission(apiuser): |
|
1551 | 1547 | _perms = ('repository.admin',) |
|
1552 | 1548 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1553 | 1549 | |
|
1554 | 1550 | try: |
|
1555 | 1551 | |
|
1556 | 1552 | RepoModel().grant_user_permission(repo=repo, user=user, perm=perm) |
|
1557 | 1553 | |
|
1558 | 1554 | Session().commit() |
|
1559 | 1555 | return { |
|
1560 | 1556 | 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % ( |
|
1561 | 1557 | perm.permission_name, user.username, repo.repo_name |
|
1562 | 1558 | ), |
|
1563 | 1559 | 'success': True |
|
1564 | 1560 | } |
|
1565 | 1561 | except Exception: |
|
1566 | 1562 | log.exception( |
|
1567 | 1563 | "Exception occurred while trying to edit permissions for repo") |
|
1568 | 1564 | raise JSONRPCError( |
|
1569 | 1565 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1570 | 1566 | userid, repoid |
|
1571 | 1567 | ) |
|
1572 | 1568 | ) |
|
1573 | 1569 | |
|
1574 | 1570 | |
|
1575 | 1571 | @jsonrpc_method() |
|
1576 | 1572 | def revoke_user_permission(request, apiuser, repoid, userid): |
|
1577 | 1573 | """ |
|
1578 | 1574 | Revoke permission for a user on the specified repository. |
|
1579 | 1575 | |
|
1580 | 1576 | This command can only be run using an |authtoken| with admin |
|
1581 | 1577 | permissions on the |repo|. |
|
1582 | 1578 | |
|
1583 | 1579 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1584 | 1580 | :type apiuser: AuthUser |
|
1585 | 1581 | :param repoid: Set the repository name or repository ID. |
|
1586 | 1582 | :type repoid: str or int |
|
1587 | 1583 | :param userid: Set the user name of revoked user. |
|
1588 | 1584 | :type userid: str or int |
|
1589 | 1585 | |
|
1590 | 1586 | Example output: |
|
1591 | 1587 | |
|
1592 | 1588 | .. code-block:: bash |
|
1593 | 1589 | |
|
1594 | 1590 | id : <id_given_in_input> |
|
1595 | 1591 | result: { |
|
1596 | 1592 | "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`", |
|
1597 | 1593 | "success": true |
|
1598 | 1594 | } |
|
1599 | 1595 | error: null |
|
1600 | 1596 | """ |
|
1601 | 1597 | |
|
1602 | 1598 | repo = get_repo_or_error(repoid) |
|
1603 | 1599 | user = get_user_or_error(userid) |
|
1604 | 1600 | if not has_superadmin_permission(apiuser): |
|
1605 | 1601 | _perms = ('repository.admin',) |
|
1606 | 1602 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1607 | 1603 | |
|
1608 | 1604 | try: |
|
1609 | 1605 | RepoModel().revoke_user_permission(repo=repo, user=user) |
|
1610 | 1606 | Session().commit() |
|
1611 | 1607 | return { |
|
1612 | 1608 | 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % ( |
|
1613 | 1609 | user.username, repo.repo_name |
|
1614 | 1610 | ), |
|
1615 | 1611 | 'success': True |
|
1616 | 1612 | } |
|
1617 | 1613 | except Exception: |
|
1618 | 1614 | log.exception( |
|
1619 | 1615 | "Exception occurred while trying to revoke permissions on repo") |
|
1620 | 1616 | raise JSONRPCError( |
|
1621 | 1617 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1622 | 1618 | userid, repoid |
|
1623 | 1619 | ) |
|
1624 | 1620 | ) |
|
1625 | 1621 | |
|
1626 | 1622 | |
|
1627 | 1623 | @jsonrpc_method() |
|
1628 | 1624 | def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm): |
|
1629 | 1625 | """ |
|
1630 | 1626 | Grant permission for a user group on the specified repository, |
|
1631 | 1627 | or update existing permissions. |
|
1632 | 1628 | |
|
1633 | 1629 | This command can only be run using an |authtoken| with admin |
|
1634 | 1630 | permissions on the |repo|. |
|
1635 | 1631 | |
|
1636 | 1632 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1637 | 1633 | :type apiuser: AuthUser |
|
1638 | 1634 | :param repoid: Set the repository name or repository ID. |
|
1639 | 1635 | :type repoid: str or int |
|
1640 | 1636 | :param usergroupid: Specify the ID of the user group. |
|
1641 | 1637 | :type usergroupid: str or int |
|
1642 | 1638 | :param perm: Set the user group permissions using the following |
|
1643 | 1639 | format: (repository.(none|read|write|admin)) |
|
1644 | 1640 | :type perm: str |
|
1645 | 1641 | |
|
1646 | 1642 | Example output: |
|
1647 | 1643 | |
|
1648 | 1644 | .. code-block:: bash |
|
1649 | 1645 | |
|
1650 | 1646 | id : <id_given_in_input> |
|
1651 | 1647 | result : { |
|
1652 | 1648 | "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`", |
|
1653 | 1649 | "success": true |
|
1654 | 1650 | |
|
1655 | 1651 | } |
|
1656 | 1652 | error : null |
|
1657 | 1653 | |
|
1658 | 1654 | Example error output: |
|
1659 | 1655 | |
|
1660 | 1656 | .. code-block:: bash |
|
1661 | 1657 | |
|
1662 | 1658 | id : <id_given_in_input> |
|
1663 | 1659 | result : null |
|
1664 | 1660 | error : { |
|
1665 | 1661 | 'failed to edit permission for user group: `<usergroup>` in repo `<repo>`' |
|
1666 | 1662 | } |
|
1667 | 1663 | |
|
1668 | 1664 | """ |
|
1669 | 1665 | |
|
1670 | 1666 | repo = get_repo_or_error(repoid) |
|
1671 | 1667 | perm = get_perm_or_error(perm) |
|
1672 | 1668 | if not has_superadmin_permission(apiuser): |
|
1673 | 1669 | _perms = ('repository.admin',) |
|
1674 | 1670 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1675 | 1671 | |
|
1676 | 1672 | user_group = get_user_group_or_error(usergroupid) |
|
1677 | 1673 | if not has_superadmin_permission(apiuser): |
|
1678 | 1674 | # check if we have at least read permission for this user group ! |
|
1679 | 1675 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
1680 | 1676 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
1681 | 1677 | user=apiuser, user_group_name=user_group.users_group_name): |
|
1682 | 1678 | raise JSONRPCError( |
|
1683 | 1679 | 'user group `%s` does not exist' % (usergroupid,)) |
|
1684 | 1680 | |
|
1685 | 1681 | try: |
|
1686 | 1682 | RepoModel().grant_user_group_permission( |
|
1687 | 1683 | repo=repo, group_name=user_group, perm=perm) |
|
1688 | 1684 | |
|
1689 | 1685 | Session().commit() |
|
1690 | 1686 | return { |
|
1691 | 1687 | 'msg': 'Granted perm: `%s` for user group: `%s` in ' |
|
1692 | 1688 | 'repo: `%s`' % ( |
|
1693 | 1689 | perm.permission_name, user_group.users_group_name, |
|
1694 | 1690 | repo.repo_name |
|
1695 | 1691 | ), |
|
1696 | 1692 | 'success': True |
|
1697 | 1693 | } |
|
1698 | 1694 | except Exception: |
|
1699 | 1695 | log.exception( |
|
1700 | 1696 | "Exception occurred while trying to change permission on repo") |
|
1701 | 1697 | raise JSONRPCError( |
|
1702 | 1698 | 'failed to edit permission for user group: `%s` in ' |
|
1703 | 1699 | 'repo: `%s`' % ( |
|
1704 | 1700 | usergroupid, repo.repo_name |
|
1705 | 1701 | ) |
|
1706 | 1702 | ) |
|
1707 | 1703 | |
|
1708 | 1704 | |
|
1709 | 1705 | @jsonrpc_method() |
|
1710 | 1706 | def revoke_user_group_permission(request, apiuser, repoid, usergroupid): |
|
1711 | 1707 | """ |
|
1712 | 1708 | Revoke the permissions of a user group on a given repository. |
|
1713 | 1709 | |
|
1714 | 1710 | This command can only be run using an |authtoken| with admin |
|
1715 | 1711 | permissions on the |repo|. |
|
1716 | 1712 | |
|
1717 | 1713 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1718 | 1714 | :type apiuser: AuthUser |
|
1719 | 1715 | :param repoid: Set the repository name or repository ID. |
|
1720 | 1716 | :type repoid: str or int |
|
1721 | 1717 | :param usergroupid: Specify the user group ID. |
|
1722 | 1718 | :type usergroupid: str or int |
|
1723 | 1719 | |
|
1724 | 1720 | Example output: |
|
1725 | 1721 | |
|
1726 | 1722 | .. code-block:: bash |
|
1727 | 1723 | |
|
1728 | 1724 | id : <id_given_in_input> |
|
1729 | 1725 | result: { |
|
1730 | 1726 | "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`", |
|
1731 | 1727 | "success": true |
|
1732 | 1728 | } |
|
1733 | 1729 | error: null |
|
1734 | 1730 | """ |
|
1735 | 1731 | |
|
1736 | 1732 | repo = get_repo_or_error(repoid) |
|
1737 | 1733 | if not has_superadmin_permission(apiuser): |
|
1738 | 1734 | _perms = ('repository.admin',) |
|
1739 | 1735 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1740 | 1736 | |
|
1741 | 1737 | user_group = get_user_group_or_error(usergroupid) |
|
1742 | 1738 | if not has_superadmin_permission(apiuser): |
|
1743 | 1739 | # check if we have at least read permission for this user group ! |
|
1744 | 1740 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
1745 | 1741 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
1746 | 1742 | user=apiuser, user_group_name=user_group.users_group_name): |
|
1747 | 1743 | raise JSONRPCError( |
|
1748 | 1744 | 'user group `%s` does not exist' % (usergroupid,)) |
|
1749 | 1745 | |
|
1750 | 1746 | try: |
|
1751 | 1747 | RepoModel().revoke_user_group_permission( |
|
1752 | 1748 | repo=repo, group_name=user_group) |
|
1753 | 1749 | |
|
1754 | 1750 | Session().commit() |
|
1755 | 1751 | return { |
|
1756 | 1752 | 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % ( |
|
1757 | 1753 | user_group.users_group_name, repo.repo_name |
|
1758 | 1754 | ), |
|
1759 | 1755 | 'success': True |
|
1760 | 1756 | } |
|
1761 | 1757 | except Exception: |
|
1762 | 1758 | log.exception("Exception occurred while trying to revoke " |
|
1763 | 1759 | "user group permission on repo") |
|
1764 | 1760 | raise JSONRPCError( |
|
1765 | 1761 | 'failed to edit permission for user group: `%s` in ' |
|
1766 | 1762 | 'repo: `%s`' % ( |
|
1767 | 1763 | user_group.users_group_name, repo.repo_name |
|
1768 | 1764 | ) |
|
1769 | 1765 | ) |
|
1770 | 1766 | |
|
1771 | 1767 | |
|
1772 | 1768 | @jsonrpc_method() |
|
1773 | 1769 | def pull(request, apiuser, repoid): |
|
1774 | 1770 | """ |
|
1775 | 1771 | Triggers a pull on the given repository from a remote location. You |
|
1776 | 1772 | can use this to keep remote repositories up-to-date. |
|
1777 | 1773 | |
|
1778 | 1774 | This command can only be run using an |authtoken| with admin |
|
1779 | 1775 | rights to the specified repository. For more information, |
|
1780 | 1776 | see :ref:`config-token-ref`. |
|
1781 | 1777 | |
|
1782 | 1778 | This command takes the following options: |
|
1783 | 1779 | |
|
1784 | 1780 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1785 | 1781 | :type apiuser: AuthUser |
|
1786 | 1782 | :param repoid: The repository name or repository ID. |
|
1787 | 1783 | :type repoid: str or int |
|
1788 | 1784 | |
|
1789 | 1785 | Example output: |
|
1790 | 1786 | |
|
1791 | 1787 | .. code-block:: bash |
|
1792 | 1788 | |
|
1793 | 1789 | id : <id_given_in_input> |
|
1794 | 1790 | result : { |
|
1795 | 1791 | "msg": "Pulled from `<repository name>`", |
|
1796 | 1792 | "repository": "<repository name>" |
|
1797 | 1793 | } |
|
1798 | 1794 | error : null |
|
1799 | 1795 | |
|
1800 | 1796 | Example error output: |
|
1801 | 1797 | |
|
1802 | 1798 | .. code-block:: bash |
|
1803 | 1799 | |
|
1804 | 1800 | id : <id_given_in_input> |
|
1805 | 1801 | result : null |
|
1806 | 1802 | error : { |
|
1807 | 1803 | "Unable to pull changes from `<reponame>`" |
|
1808 | 1804 | } |
|
1809 | 1805 | |
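Example usage (an illustrative sketch; endpoint, token, and the repo
name are placeholders; the repository is assumed to have a remote
``clone_uri`` configured to pull from):

.. code-block:: python

    import requests

    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'pull',
        'args': {'repoid': 'my-mirror-repo'},
    }
    print(requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json())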
|
1810 | 1806 | """ |
|
1811 | 1807 | |
|
1812 | 1808 | repo = get_repo_or_error(repoid) |
|
1813 | 1809 | if not has_superadmin_permission(apiuser): |
|
1814 | 1810 | _perms = ('repository.admin',) |
|
1815 | 1811 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1816 | 1812 | |
|
1817 | 1813 | try: |
|
1818 | 1814 | ScmModel().pull_changes(repo.repo_name, apiuser.username) |
|
1819 | 1815 | return { |
|
1820 | 1816 | 'msg': 'Pulled from `%s`' % repo.repo_name, |
|
1821 | 1817 | 'repository': repo.repo_name |
|
1822 | 1818 | } |
|
1823 | 1819 | except Exception: |
|
1824 | 1820 | log.exception("Exception occurred while trying to " |
|
1825 | 1821 | "pull changes from remote location") |
|
1826 | 1822 | raise JSONRPCError( |
|
1827 | 1823 | 'Unable to pull changes from `%s`' % repo.repo_name |
|
1828 | 1824 | ) |
|
1829 | 1825 | |
|
1830 | 1826 | |
|
1831 | 1827 | @jsonrpc_method() |
|
1832 | 1828 | def strip(request, apiuser, repoid, revision, branch): |
|
1833 | 1829 | """ |
|
1834 | 1830 | Strips the given revision from the specified repository. |
|
1835 | 1831 | |
|
1836 | 1832 | * This will remove the revision and all of its descendants. |
|
1837 | 1833 | |
|
1838 | 1834 | This command can only be run using an |authtoken| with admin rights to |
|
1839 | 1835 | the specified repository. |
|
1840 | 1836 | |
|
1841 | 1837 | This command takes the following options: |
|
1842 | 1838 | |
|
1843 | 1839 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1844 | 1840 | :type apiuser: AuthUser |
|
1845 | 1841 | :param repoid: The repository name or repository ID. |
|
1846 | 1842 | :type repoid: str or int |
|
1847 | 1843 | :param revision: The revision you wish to strip. |
|
1848 | 1844 | :type revision: str |
|
1849 | 1845 | :param branch: The branch from which to strip the revision. |
|
1850 | 1846 | :type branch: str |
|
1851 | 1847 | |
|
1852 | 1848 | Example output: |
|
1853 | 1849 | |
|
1854 | 1850 | .. code-block:: bash |
|
1855 | 1851 | |
|
1856 | 1852 | id : <id_given_in_input> |
|
1857 | 1853 | result : { |
|
1858 | 1854 | "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'" |
|
1859 | 1855 | "repository": "<repository name>" |
|
1860 | 1856 | } |
|
1861 | 1857 | error : null |
|
1862 | 1858 | |
|
1863 | 1859 | Example error output: |
|
1864 | 1860 | |
|
1865 | 1861 | .. code-block:: bash |
|
1866 | 1862 | |
|
1867 | 1863 | id : <id_given_in_input> |
|
1868 | 1864 | result : null |
|
1869 | 1865 | error : { |
|
1870 | 1866 | "Unable to strip commit <commit_hash> from repo `<repository name>`" |
|
1871 | 1867 | } |
|
1872 | 1868 | |
|
1873 | 1869 | """ |
|
1874 | 1870 | |
|
1875 | 1871 | repo = get_repo_or_error(repoid) |
|
1876 | 1872 | if not has_superadmin_permission(apiuser): |
|
1877 | 1873 | _perms = ('repository.admin',) |
|
1878 | 1874 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1879 | 1875 | |
|
1880 | 1876 | try: |
|
1881 | 1877 | ScmModel().strip(repo, revision, branch) |
|
1882 | 1878 | audit_logger.store_api( |
|
1883 | 1879 | 'repo.commit.strip', action_data={'commit_id': revision}, |
|
1884 | 1880 | repo=repo, |
|
1885 | 1881 | user=apiuser, commit=True) |
|
1886 | 1882 | |
|
1887 | 1883 | return { |
|
1888 | 1884 | 'msg': 'Stripped commit %s from repo `%s`' % ( |
|
1889 | 1885 | revision, repo.repo_name), |
|
1890 | 1886 | 'repository': repo.repo_name |
|
1891 | 1887 | } |
|
1892 | 1888 | except Exception: |
|
1893 | 1889 | log.exception("Exception while trying to strip") |
|
1894 | 1890 | raise JSONRPCError( |
|
1895 | 1891 | 'Unable to strip commit %s from repo `%s`' % ( |
|
1896 | 1892 | revision, repo.repo_name) |
|
1897 | 1893 | ) |
|
1898 | 1894 | |
|
1899 | 1895 | |
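The same JSON-RPC transport as in the `pull` sketch above drives `strip`; only the arguments differ. Values here are placeholders, and the operation is destructive: the commit and all of its descendants are removed:

.. code-block:: python

    # Sketch of a `strip` call; placeholders as in the `pull` example.
    import requests

    payload = {
        'id': 2,
        'auth_token': 'SECRET_TOKEN',
        'method': 'strip',
        'args': {
            'repoid': 'my-repo',
            'revision': 'deadbeefcafe',  # hash of the commit to strip
            'branch': 'default',         # branch the commit belongs to
        },
    }
    print(requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json())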
|
1900 | 1896 | @jsonrpc_method() |
|
1901 | 1897 | def get_repo_settings(request, apiuser, repoid, key=Optional(None)): |
|
1902 | 1898 | """ |
|
1903 | 1899 | Returns all settings for a repository. If a key is given, only the |
|
1904 | 1900 | setting identified by that key is returned, or null if it does not exist. |
|
1905 | 1901 | |
|
1906 | 1902 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1907 | 1903 | :type apiuser: AuthUser |
|
1908 | 1904 | :param repoid: The repository name or repository id. |
|
1909 | 1905 | :type repoid: str or int |
|
1910 | 1906 | :param key: Key of the setting to return. |
|
1911 | 1907 | :type key: Optional(str) |
|
1912 | 1908 | |
|
1913 | 1909 | Example output: |
|
1914 | 1910 | |
|
1915 | 1911 | .. code-block:: bash |
|
1916 | 1912 | |
|
1917 | 1913 | { |
|
1918 | 1914 | "error": null, |
|
1919 | 1915 | "id": 237, |
|
1920 | 1916 | "result": { |
|
1921 | 1917 | "extensions_largefiles": true, |
|
1922 | 1918 | "extensions_evolve": true, |
|
1923 | 1919 | "hooks_changegroup_push_logger": true, |
|
1924 | 1920 | "hooks_changegroup_repo_size": false, |
|
1925 | 1921 | "hooks_outgoing_pull_logger": true, |
|
1926 | 1922 | "phases_publish": "True", |
|
1927 | 1923 | "rhodecode_hg_use_rebase_for_merging": true, |
|
1928 | 1924 | "rhodecode_pr_merge_enabled": true, |
|
1929 | 1925 | "rhodecode_use_outdated_comments": true |
|
1930 | 1926 | } |
|
1931 | 1927 | } |
|
1932 | 1928 | """ |
|
1933 | 1929 | |
|
1934 | 1930 | # Restrict access to this api method to admins only. |
|
1935 | 1931 | if not has_superadmin_permission(apiuser): |
|
1936 | 1932 | raise JSONRPCForbidden() |
|
1937 | 1933 | |
|
1938 | 1934 | try: |
|
1939 | 1935 | repo = get_repo_or_error(repoid) |
|
1940 | 1936 | settings_model = VcsSettingsModel(repo=repo) |
|
1941 | 1937 | settings = settings_model.get_global_settings() |
|
1942 | 1938 | settings.update(settings_model.get_repo_settings()) |
|
1943 | 1939 | |
|
1944 | 1940 | # If only a single setting is requested fetch it from all settings. |
|
1945 | 1941 | key = Optional.extract(key) |
|
1946 | 1942 | if key is not None: |
|
1947 | 1943 | settings = settings.get(key, None) |
|
1948 | 1944 | except Exception: |
|
1949 | 1945 | msg = 'Failed to fetch settings for repository `{}`'.format(repoid) |
|
1950 | 1946 | log.exception(msg) |
|
1951 | 1947 | raise JSONRPCError(msg) |
|
1952 | 1948 | |
|
1953 | 1949 | return settings |
|
1954 | 1950 | |
|
1955 | 1951 | |
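The lookup above is plain dictionary shadowing: repo-level settings override global ones, and the optional key narrows the result to a single value. A sketch with plain dicts standing in for the `VcsSettingsModel` results:

.. code-block:: python

    # Plain dicts stand in for get_global_settings() / get_repo_settings().
    global_settings = {'phases_publish': 'True', 'extensions_largefiles': True}
    repo_settings = {'extensions_largefiles': False}

    settings = dict(global_settings)
    settings.update(repo_settings)   # repo-level values win on conflict

    key = 'extensions_largefiles'    # e.g. the optional `key` argument
    result = settings.get(key) if key is not None else settings
    print(result)                    # False -- the repo-level override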
|
1956 | 1952 | @jsonrpc_method() |
|
1957 | 1953 | def set_repo_settings(request, apiuser, repoid, settings): |
|
1958 | 1954 | """ |
|
1959 | 1955 | Update repository settings. Returns true on success. |
|
1960 | 1956 | |
|
1961 | 1957 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1962 | 1958 | :type apiuser: AuthUser |
|
1963 | 1959 | :param repoid: The repository name or repository id. |
|
1964 | 1960 | :type repoid: str or int |
|
1965 | 1961 | :param settings: The new settings for the repository. |
|
1966 | 1962 | :type settings: dict |
|
1967 | 1963 | |
|
1968 | 1964 | Example output: |
|
1969 | 1965 | |
|
1970 | 1966 | .. code-block:: bash |
|
1971 | 1967 | |
|
1972 | 1968 | { |
|
1973 | 1969 | "error": null, |
|
1974 | 1970 | "id": 237, |
|
1975 | 1971 | "result": true |
|
1976 | 1972 | } |
|
1977 | 1973 | """ |
|
1978 | 1974 | # Restrict access to this api method to admins only. |
|
1979 | 1975 | if not has_superadmin_permission(apiuser): |
|
1980 | 1976 | raise JSONRPCForbidden() |
|
1981 | 1977 | |
|
1982 | 1978 | if type(settings) is not dict: |
|
1983 | 1979 | raise JSONRPCError('Settings have to be a JSON Object.') |
|
1984 | 1980 | |
|
1985 | 1981 | try: |
|
1986 | 1982 | settings_model = VcsSettingsModel(repo=repoid) |
|
1987 | 1983 | |
|
1988 | 1984 | # Merge global, repo and incoming settings. |
|
1989 | 1985 | new_settings = settings_model.get_global_settings() |
|
1990 | 1986 | new_settings.update(settings_model.get_repo_settings()) |
|
1991 | 1987 | new_settings.update(settings) |
|
1992 | 1988 | |
|
1993 | 1989 | # Update the settings. |
|
1994 | 1990 | inherit_global_settings = new_settings.get( |
|
1995 | 1991 | 'inherit_global_settings', False) |
|
1996 | 1992 | settings_model.create_or_update_repo_settings( |
|
1997 | 1993 | new_settings, inherit_global_settings=inherit_global_settings) |
|
1998 | 1994 | Session().commit() |
|
1999 | 1995 | except Exception: |
|
2000 | 1996 | msg = 'Failed to update settings for repository `{}`'.format(repoid) |
|
2001 | 1997 | log.exception(msg) |
|
2002 | 1998 | raise JSONRPCError(msg) |
|
2003 | 1999 | |
|
2004 | 2000 | # Indicate success. |
|
2005 | 2001 | return True |
|
2006 | 2002 | |
|
2007 | 2003 | |
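Because the incoming settings are merged over the current global and repo values, a partial payload only changes the keys it names. A hedged sketch of such a call, with the same placeholder host and token as before:

.. code-block:: python

    # Only the keys present in `settings` change; everything else keeps
    # its current merged value. Host and token are placeholders.
    import requests

    payload = {
        'id': 3,
        'auth_token': 'SECRET_TOKEN',
        'method': 'set_repo_settings',
        'args': {
            'repoid': 'my-repo',
            'settings': {'rhodecode_pr_merge_enabled': False},
        },
    }
    print(requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json())
    # {'id': 3, 'result': True, 'error': None}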
|
2008 | 2004 | @jsonrpc_method() |
|
2009 | 2005 | def maintenance(request, apiuser, repoid): |
|
2010 | 2006 | """ |
|
2011 | 2007 | Triggers a maintenance on the given repository. |
|
2012 | 2008 | |
|
2013 | 2009 | This command can only be run using an |authtoken| with admin |
|
2014 | 2010 | rights to the specified repository. For more information, |
|
2015 | 2011 | see :ref:`config-token-ref`. |
|
2016 | 2012 | |
|
2017 | 2013 | This command takes the following options: |
|
2018 | 2014 | |
|
2019 | 2015 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2020 | 2016 | :type apiuser: AuthUser |
|
2021 | 2017 | :param repoid: The repository name or repository ID. |
|
2022 | 2018 | :type repoid: str or int |
|
2023 | 2019 | |
|
2024 | 2020 | Example output: |
|
2025 | 2021 | |
|
2026 | 2022 | .. code-block:: bash |
|
2027 | 2023 | |
|
2028 | 2024 | id : <id_given_in_input> |
|
2029 | 2025 | result : { |
|
2030 | 2026 | "msg": "executed maintenance command", |
|
2031 | 2027 | "executed_actions": [ |
|
2032 | 2028 | <action_message>, <action_message2>... |
|
2033 | 2029 | ], |
|
2034 | 2030 | "repository": "<repository name>" |
|
2035 | 2031 | } |
|
2036 | 2032 | error : null |
|
2037 | 2033 | |
|
2038 | 2034 | Example error output: |
|
2039 | 2035 | |
|
2040 | 2036 | .. code-block:: bash |
|
2041 | 2037 | |
|
2042 | 2038 | id : <id_given_in_input> |
|
2043 | 2039 | result : null |
|
2044 | 2040 | error : { |
|
2045 | 2041 | "Unable to execute maintenance on `<reponame>`" |
|
2046 | 2042 | } |
|
2047 | 2043 | |
|
2048 | 2044 | """ |
|
2049 | 2045 | |
|
2050 | 2046 | repo = get_repo_or_error(repoid) |
|
2051 | 2047 | if not has_superadmin_permission(apiuser): |
|
2052 | 2048 | _perms = ('repository.admin',) |
|
2053 | 2049 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2054 | 2050 | |
|
2055 | 2051 | try: |
|
2056 | 2052 | maintenance = repo_maintenance.RepoMaintenance() |
|
2057 | 2053 | executed_actions = maintenance.execute(repo) |
|
2058 | 2054 | |
|
2059 | 2055 | return { |
|
2060 | 2056 | 'msg': 'executed maintenance command', |
|
2061 | 2057 | 'executed_actions': executed_actions, |
|
2062 | 2058 | 'repository': repo.repo_name |
|
2063 | 2059 | } |
|
2064 | 2060 | except Exception: |
|
2065 | 2061 | log.exception("Exception occurred while trying to run maintenance") |
|
2066 | 2062 | raise JSONRPCError( |
|
2067 | 2063 | 'Unable to execute maintenance on `%s`' % repo.repo_name) |
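The view reports whatever `RepoMaintenance.execute` returns as `executed_actions`. The class internals are not shown in this changeset; the sketch below is only an illustrative shape for such a runner, with a hypothetical task name:

.. code-block:: python

    # Illustrative shape only -- not the actual RepoMaintenance internals.
    # Each task returns a human-readable message; the list of messages is
    # what `executed_actions` carries back to the API caller.
    class GitGC(object):
        human_name = 'git gc'

        def run(self, repo_name):
            # ... run `git gc` against the repository on disk ...
            return 'executed %s on %s' % (self.human_name, repo_name)

    def execute_maintenance(repo_name, tasks):
        return [task.run(repo_name) for task in tasks]

    print(execute_maintenance('my-repo', [GitGC()]))
    # ['executed git gc on my-repo']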
@@ -1,183 +1,181 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import formencode |
|
23 | 23 | import formencode.htmlfill |
|
24 | 24 | |
|
25 | 25 | from pyramid.httpexceptions import HTTPFound, HTTPForbidden |
|
26 | 26 | from pyramid.view import view_config |
|
27 | 27 | from pyramid.renderers import render |
|
28 | 28 | from pyramid.response import Response |
|
29 | 29 | |
|
30 | 30 | from rhodecode.apps._base import BaseAppView, DataGridAppView |
|
31 | from rhodecode.lib.celerylib.utils import get_task_id | |
|
31 | 32 | |
|
32 | 33 | from rhodecode.lib.ext_json import json |
|
33 | 34 | from rhodecode.lib.auth import ( |
|
34 | 35 | LoginRequired, CSRFRequired, NotAnonymous, |
|
35 | 36 | HasPermissionAny, HasRepoGroupPermissionAny) |
|
36 | 37 | from rhodecode.lib import helpers as h |
|
37 | 38 | from rhodecode.lib.utils import repo_name_slug |
|
38 | 39 | from rhodecode.lib.utils2 import safe_int, safe_unicode |
|
39 | 40 | from rhodecode.model.forms import RepoForm |
|
40 | 41 | from rhodecode.model.repo import RepoModel |
|
41 | 42 | from rhodecode.model.scm import RepoList, RepoGroupList, ScmModel |
|
42 | 43 | from rhodecode.model.settings import SettingsModel |
|
43 | 44 | from rhodecode.model.db import Repository, RepoGroup |
|
44 | 45 | |
|
45 | 46 | log = logging.getLogger(__name__) |
|
46 | 47 | |
|
47 | 48 | |
|
48 | 49 | class AdminReposView(BaseAppView, DataGridAppView): |
|
49 | 50 | |
|
50 | 51 | def load_default_context(self): |
|
51 | 52 | c = self._get_local_tmpl_context() |
|
52 | 53 | |
|
53 | 54 | return c |
|
54 | 55 | |
|
55 | 56 | def _load_form_data(self, c): |
|
56 | 57 | acl_groups = RepoGroupList(RepoGroup.query().all(), |
|
57 | 58 | perm_set=['group.write', 'group.admin']) |
|
58 | 59 | c.repo_groups = RepoGroup.groups_choices(groups=acl_groups) |
|
59 | 60 | c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups) |
|
60 | 61 | c.landing_revs_choices, c.landing_revs = \ |
|
61 | 62 | ScmModel().get_repo_landing_revs(self.request.translate) |
|
62 | 63 | c.personal_repo_group = self._rhodecode_user.personal_repo_group |
|
63 | 64 | |
|
64 | 65 | @LoginRequired() |
|
65 | 66 | @NotAnonymous() |
|
66 | 67 | # perms check inside |
|
67 | 68 | @view_config( |
|
68 | 69 | route_name='repos', request_method='GET', |
|
69 | 70 | renderer='rhodecode:templates/admin/repos/repos.mako') |
|
70 | 71 | def repository_list(self): |
|
71 | 72 | c = self.load_default_context() |
|
72 | 73 | |
|
73 | 74 | repo_list = Repository.get_all_repos() |
|
74 | 75 | c.repo_list = RepoList(repo_list, perm_set=['repository.admin']) |
|
75 | 76 | repos_data = RepoModel().get_repos_as_dict( |
|
76 | 77 | repo_list=c.repo_list, admin=True, super_user_actions=True) |
|
77 | 78 | # json used to render the grid |
|
78 | 79 | c.data = json.dumps(repos_data) |
|
79 | 80 | |
|
80 | 81 | return self._get_template_context(c) |
|
81 | 82 | |
|
82 | 83 | @LoginRequired() |
|
83 | 84 | @NotAnonymous() |
|
84 | 85 | # perms check inside |
|
85 | 86 | @view_config( |
|
86 | 87 | route_name='repo_new', request_method='GET', |
|
87 | 88 | renderer='rhodecode:templates/admin/repos/repo_add.mako') |
|
88 | 89 | def repository_new(self): |
|
89 | 90 | c = self.load_default_context() |
|
90 | 91 | |
|
91 | 92 | new_repo = self.request.GET.get('repo', '') |
|
92 | 93 | parent_group = safe_int(self.request.GET.get('parent_group')) |
|
93 | 94 | _gr = RepoGroup.get(parent_group) |
|
94 | 95 | |
|
95 | 96 | if not HasPermissionAny('hg.admin', 'hg.create.repository')(): |
|
96 | 97 | # you're not a super admin and don't have global create permissions, |
|
97 | 98 | # but maybe you have at least write permission to a parent group? |
|
98 | 99 | |
|
99 | 100 | gr_name = _gr.group_name if _gr else None |
|
100 | 101 | # 'create repositories with write permission on group' is set to true |
|
101 | 102 | create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')() |
|
102 | 103 | group_admin = HasRepoGroupPermissionAny('group.admin')(group_name=gr_name) |
|
103 | 104 | group_write = HasRepoGroupPermissionAny('group.write')(group_name=gr_name) |
|
104 | 105 | if not (group_admin or (group_write and create_on_write)): |
|
105 | 106 | raise HTTPForbidden() |
|
106 | 107 | |
|
107 | 108 | self._load_form_data(c) |
|
108 | 109 | c.new_repo = repo_name_slug(new_repo) |
|
109 | 110 | |
|
110 | 111 | # apply the defaults from defaults page |
|
111 | 112 | defaults = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
112 | 113 | # pre-check the copy-permissions checkbox by default |
|
113 | 114 | defaults['repo_copy_permissions'] = True |
|
114 | 115 | |
|
115 | 116 | parent_group_choice = '-1' |
|
116 | 117 | if not self._rhodecode_user.is_admin and self._rhodecode_user.personal_repo_group: |
|
117 | 118 | parent_group_choice = self._rhodecode_user.personal_repo_group |
|
118 | 119 | |
|
119 | 120 | if parent_group and _gr: |
|
120 | 121 | if parent_group in [x[0] for x in c.repo_groups]: |
|
121 | 122 | parent_group_choice = safe_unicode(parent_group) |
|
122 | 123 | |
|
123 | 124 | defaults.update({'repo_group': parent_group_choice}) |
|
124 | 125 | |
|
125 | 126 | data = render('rhodecode:templates/admin/repos/repo_add.mako', |
|
126 | 127 | self._get_template_context(c), self.request) |
|
127 | 128 | html = formencode.htmlfill.render( |
|
128 | 129 | data, |
|
129 | 130 | defaults=defaults, |
|
130 | 131 | encoding="UTF-8", |
|
131 | 132 | force_defaults=False |
|
132 | 133 | ) |
|
133 | 134 | return Response(html) |
|
134 | 135 | |
|
135 | 136 | @LoginRequired() |
|
136 | 137 | @NotAnonymous() |
|
137 | 138 | @CSRFRequired() |
|
138 | 139 | # perms check inside |
|
139 | 140 | @view_config( |
|
140 | 141 | route_name='repo_create', request_method='POST', |
|
141 | 142 | renderer='rhodecode:templates/admin/repos/repos.mako') |
|
142 | 143 | def repository_create(self): |
|
143 | 144 | c = self.load_default_context() |
|
144 | 145 | |
|
145 | 146 | form_result = {} |
|
147 | self._load_form_data(c) | |
|
146 | 148 | task_id = None |
|
147 | self._load_form_data(c) | |
|
148 | ||
|
149 | 149 | try: |
|
150 | 150 | # CanWriteToGroup validators checks permissions of this POST |
|
151 | 151 | form = RepoForm( |
|
152 | 152 | self.request.translate, repo_groups=c.repo_groups_choices, |
|
153 | 153 | landing_revs=c.landing_revs_choices)() |
|
154 | 154 | form_result = form.to_python(dict(self.request.POST)) |
|
155 | 155 | |
|
156 | 156 | # create is done sometimes async on celery, db transaction |
|
157 | 157 | # management is handled there. |
|
158 | 158 | task = RepoModel().create(form_result, self._rhodecode_user.user_id) |
|
159 | from celery.result import BaseAsyncResult | |
|
160 | if isinstance(task, BaseAsyncResult): | |
|
161 | task_id = task.task_id | |
|
159 | task_id = get_task_id(task) | |
|
162 | 160 | except formencode.Invalid as errors: |
|
163 | 161 | data = render('rhodecode:templates/admin/repos/repo_add.mako', |
|
164 | 162 | self._get_template_context(c), self.request) |
|
165 | 163 | html = formencode.htmlfill.render( |
|
166 | 164 | data, |
|
167 | 165 | defaults=errors.value, |
|
168 | 166 | errors=errors.error_dict or {}, |
|
169 | 167 | prefix_error=False, |
|
170 | 168 | encoding="UTF-8", |
|
171 | 169 | force_defaults=False |
|
172 | 170 | ) |
|
173 | 171 | return Response(html) |
|
174 | 172 | |
|
175 | 173 | except Exception as e: |
|
176 | 174 | msg = self._log_creation_exception(e, form_result.get('repo_name')) |
|
177 | 175 | h.flash(msg, category='error') |
|
178 | 176 | raise HTTPFound(h.route_path('home')) |
|
179 | 177 | |
|
180 | 178 | raise HTTPFound( |
|
181 | 179 | h.route_path('repo_creating', |
|
182 | 180 | repo_name=form_result['repo_name_full'], |
|
183 | 181 | _query=dict(task_id=task_id))) |
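The inline `isinstance(task, BaseAsyncResult)` check is gone because Celery 4 removed that alias. A plausible shape for the `get_task_id` helper imported at the top of this file (the real implementation lives in `rhodecode.lib.celerylib.utils`; this is a hedged reconstruction, not a quote of it):

.. code-block:: python

    # Hedged reconstruction of the helper's likely shape.
    from celery.result import AsyncResult  # Celery 4 name; BaseAsyncResult is gone

    def get_task_id(task):
        # RepoModel().create() hands back an AsyncResult when the work was
        # dispatched to celery, or a plain value when it ran in-process.
        task_id = None
        if isinstance(task, AsyncResult):
            task_id = task.task_id
        return task_id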
@@ -1,110 +1,109 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | from pyramid.view import view_config |
|
24 | 24 | from pyramid.httpexceptions import HTTPFound, HTTPNotFound |
|
25 | 25 | |
|
26 | 26 | from rhodecode.apps._base import BaseAppView |
|
27 | 27 | from rhodecode.lib import helpers as h |
|
28 | 28 | from rhodecode.lib.auth import (NotAnonymous, HasRepoPermissionAny) |
|
29 | 29 | from rhodecode.model.db import Repository |
|
30 | 30 | |
|
31 | 31 | log = logging.getLogger(__name__) |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | class RepoChecksView(BaseAppView): |
|
35 | 35 | def load_default_context(self): |
|
36 | 36 | c = self._get_local_tmpl_context() |
|
37 | 37 | |
|
38 | 38 | return c |
|
39 | 39 | |
|
40 | 40 | @NotAnonymous() |
|
41 | 41 | @view_config( |
|
42 | 42 | route_name='repo_creating', request_method='GET', |
|
43 | 43 | renderer='rhodecode:templates/admin/repos/repo_creating.mako') |
|
44 | 44 | def repo_creating(self): |
|
45 | 45 | c = self.load_default_context() |
|
46 | 46 | |
|
47 | 47 | repo_name = self.request.matchdict['repo_name'] |
|
48 | 48 | db_repo = Repository.get_by_repo_name(repo_name) |
|
49 | if not db_repo: | |
|
50 | raise HTTPNotFound() | |
|
51 | 49 | |
|
52 | 50 | # check if the repo was already created |
|
53 | if db_repo.repo_state in [Repository.STATE_CREATED]: | |
|
51 | if db_repo and db_repo.repo_state in [Repository.STATE_CREATED]: | |
|
54 | 52 | # re-check permissions before redirecting to prevent resource |
|
55 | 53 | # discovery by checking the 302 code |
|
56 | 54 | perm_set = ['repository.read', 'repository.write', 'repository.admin'] |
|
57 | 55 | has_perm = HasRepoPermissionAny(*perm_set)( |
|
58 | 56 | db_repo.repo_name, 'Repo Creating check') |
|
59 | 57 | if not has_perm: |
|
60 | 58 | raise HTTPNotFound() |
|
61 | 59 | |
|
62 | 60 | raise HTTPFound(h.route_path( |
|
63 | 61 | 'repo_summary', repo_name=db_repo.repo_name)) |
|
64 | 62 | |
|
65 | 63 | c.task_id = self.request.GET.get('task_id') |
|
66 | 64 | c.repo_name = repo_name |
|
67 | 65 | |
|
68 | 66 | return self._get_template_context(c) |
|
69 | 67 | |
|
70 | 68 | @NotAnonymous() |
|
71 | 69 | @view_config( |
|
72 | 70 | route_name='repo_creating_check', request_method='GET', |
|
73 | 71 | renderer='json_ext') |
|
74 | 72 | def repo_creating_check(self): |
|
75 | 73 | _ = self.request.translate |
|
76 | 74 | task_id = self.request.GET.get('task_id') |
|
77 | 75 | self.load_default_context() |
|
78 | 76 | |
|
79 | 77 | repo_name = self.request.matchdict['repo_name'] |
|
80 | 78 | |
|
81 | 79 | if task_id and task_id not in ['None']: |
|
82 | 80 | import rhodecode |
|
83 | from celery.result import AsyncResult | |
|
81 | from rhodecode.lib.celerylib.loader import celery_app | |
|
84 | 82 | if rhodecode.CELERY_ENABLED: |
|
85 | task = AsyncResult(task_id) | |
|
83 | task = celery_app.AsyncResult(task_id) | |
|
84 | task.get() | |
|
86 | 85 | if task.failed(): |
|
87 | 86 | msg = self._log_creation_exception(task.result, repo_name) |
|
88 | 87 | h.flash(msg, category='error') |
|
89 | 88 | raise HTTPFound(h.route_path('home'), code=501) |
|
90 | 89 | |
|
91 | 90 | db_repo = Repository.get_by_repo_name(repo_name) |
|
92 | 91 | if db_repo and db_repo.repo_state == Repository.STATE_CREATED: |
|
93 | 92 | if db_repo.clone_uri: |
|
94 | 93 | clone_uri = db_repo.clone_uri_hidden |
|
95 | 94 | h.flash(_('Created repository %s from %s') |
|
96 | 95 | % (db_repo.repo_name, clone_uri), category='success') |
|
97 | 96 | else: |
|
98 | 97 | repo_url = h.link_to( |
|
99 | 98 | db_repo.repo_name, |
|
100 | 99 | h.route_path('repo_summary', repo_name=db_repo.repo_name)) |
|
101 | 100 | fork = db_repo.fork |
|
102 | 101 | if fork: |
|
103 | 102 | fork_name = fork.repo_name |
|
104 | 103 | h.flash(h.literal(_('Forked repository %s as %s') |
|
105 | 104 | % (fork_name, repo_url)), category='success') |
|
106 | 105 | else: |
|
107 | 106 | h.flash(h.literal(_('Created repository %s') % repo_url), |
|
108 | 107 | category='success') |
|
109 | 108 | return {'result': True} |
|
110 | 109 | return {'result': False} |
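Switching from `celery.result.AsyncResult(task_id)` to `celery_app.AsyncResult(task_id)` matters in Celery 4 because a result object needs the application's configured result backend; the module-level constructor would fall back to the default app. A minimal sketch of the distinction, assuming the configured `celery_app` from the loader:

.. code-block:: python

    # Minimal sketch, assuming `celery_app` is the configured application.
    from celery.result import AsyncResult
    from rhodecode.lib.celerylib.loader import celery_app

    task_id = 'some-task-id'  # placeholder

    # App-bound, as the view now does: uses celery_app's result backend.
    bound = celery_app.AsyncResult(task_id)

    # Module-level equivalent only when the app is passed explicitly.
    explicit = AsyncResult(task_id, app=celery_app)

    # Note: `task.get()` re-raises a failed task's exception by default;
    # `task.get(propagate=False)` waits without raising, after which
    # `task.failed()` can be inspected.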
@@ -1,259 +1,259 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import datetime |
|
23 | 23 | import formencode |
|
24 | 24 | import formencode.htmlfill |
|
25 | 25 | |
|
26 | 26 | from pyramid.httpexceptions import HTTPFound |
|
27 | 27 | from pyramid.view import view_config |
|
28 | 28 | from pyramid.renderers import render |
|
29 | 29 | from pyramid.response import Response |
|
30 | 30 | |
|
31 | 31 | from rhodecode.apps._base import RepoAppView, DataGridAppView |
|
32 | 32 | from rhodecode.lib.auth import ( |
|
33 | 33 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, |
|
34 | 34 | HasRepoPermissionAny, HasPermissionAnyDecorator, CSRFRequired) |
|
35 | 35 | import rhodecode.lib.helpers as h |
|
36 | from rhodecode.lib.celerylib.utils import get_task_id | |
|
36 | 37 | from rhodecode.model.db import coalesce, or_, Repository, RepoGroup |
|
37 | 38 | from rhodecode.model.repo import RepoModel |
|
38 | 39 | from rhodecode.model.forms import RepoForkForm |
|
39 | 40 | from rhodecode.model.scm import ScmModel, RepoGroupList |
|
40 | 41 | from rhodecode.lib.utils2 import safe_int, safe_unicode |
|
41 | 42 | |
|
42 | 43 | log = logging.getLogger(__name__) |
|
43 | 44 | |
|
44 | 45 | |
|
45 | 46 | class RepoForksView(RepoAppView, DataGridAppView): |
|
46 | 47 | |
|
47 | 48 | def load_default_context(self): |
|
48 | 49 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
49 | 50 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
50 | 51 | |
|
51 | 52 | acl_groups = RepoGroupList( |
|
52 | 53 | RepoGroup.query().all(), |
|
53 | 54 | perm_set=['group.write', 'group.admin']) |
|
54 | 55 | c.repo_groups = RepoGroup.groups_choices(groups=acl_groups) |
|
55 | 56 | c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups) |
|
56 | 57 | choices, c.landing_revs = ScmModel().get_repo_landing_revs( |
|
57 | 58 | self.request.translate) |
|
58 | 59 | c.landing_revs_choices = choices |
|
59 | 60 | c.personal_repo_group = c.rhodecode_user.personal_repo_group |
|
60 | 61 | |
|
61 | 62 | return c |
|
62 | 63 | |
|
63 | 64 | @LoginRequired() |
|
64 | 65 | @HasRepoPermissionAnyDecorator( |
|
65 | 66 | 'repository.read', 'repository.write', 'repository.admin') |
|
66 | 67 | @view_config( |
|
67 | 68 | route_name='repo_forks_show_all', request_method='GET', |
|
68 | 69 | renderer='rhodecode:templates/forks/forks.mako') |
|
69 | 70 | def repo_forks_show_all(self): |
|
70 | 71 | c = self.load_default_context() |
|
71 | 72 | return self._get_template_context(c) |
|
72 | 73 | |
|
73 | 74 | @LoginRequired() |
|
74 | 75 | @HasRepoPermissionAnyDecorator( |
|
75 | 76 | 'repository.read', 'repository.write', 'repository.admin') |
|
76 | 77 | @view_config( |
|
77 | 78 | route_name='repo_forks_data', request_method='GET', |
|
78 | 79 | renderer='json_ext', xhr=True) |
|
79 | 80 | def repo_forks_data(self): |
|
80 | 81 | _ = self.request.translate |
|
81 | 82 | self.load_default_context() |
|
82 | 83 | column_map = { |
|
83 | 84 | 'fork_name': 'repo_name', |
|
84 | 85 | 'fork_date': 'created_on', |
|
85 | 86 | 'last_activity': 'updated_on' |
|
86 | 87 | } |
|
87 | 88 | draw, start, limit = self._extract_chunk(self.request) |
|
88 | 89 | search_q, order_by, order_dir = self._extract_ordering( |
|
89 | 90 | self.request, column_map=column_map) |
|
90 | 91 | |
|
91 | 92 | acl_check = HasRepoPermissionAny( |
|
92 | 93 | 'repository.read', 'repository.write', 'repository.admin') |
|
93 | 94 | repo_id = self.db_repo.repo_id |
|
94 | 95 | allowed_ids = [-1] |
|
95 | 96 | for f in Repository.query().filter(Repository.fork_id == repo_id): |
|
96 | 97 | if acl_check(f.repo_name, 'get forks check'): |
|
97 | 98 | allowed_ids.append(f.repo_id) |
|
98 | 99 | |
|
99 | 100 | forks_data_total_count = Repository.query()\ |
|
100 | 101 | .filter(Repository.fork_id == repo_id)\ |
|
101 | 102 | .filter(Repository.repo_id.in_(allowed_ids))\ |
|
102 | 103 | .count() |
|
103 | 104 | |
|
104 | 105 | # json generate |
|
105 | 106 | base_q = Repository.query()\ |
|
106 | 107 | .filter(Repository.fork_id == repo_id)\ |
|
107 | 108 | .filter(Repository.repo_id.in_(allowed_ids))\ |
|
108 | 109 | |
|
109 | 110 | if search_q: |
|
110 | 111 | like_expression = u'%{}%'.format(safe_unicode(search_q)) |
|
111 | 112 | base_q = base_q.filter(or_( |
|
112 | 113 | Repository.repo_name.ilike(like_expression), |
|
113 | 114 | Repository.description.ilike(like_expression), |
|
114 | 115 | )) |
|
115 | 116 | |
|
116 | 117 | forks_data_total_filtered_count = base_q.count() |
|
117 | 118 | |
|
118 | 119 | sort_col = getattr(Repository, order_by, None) |
|
119 | 120 | if sort_col: |
|
120 | 121 | if order_dir == 'asc': |
|
121 | 122 | # handle null values properly to order by NULL last |
|
122 | 123 | if order_by in ['last_activity']: |
|
123 | 124 | sort_col = coalesce(sort_col, datetime.date.max) |
|
124 | 125 | sort_col = sort_col.asc() |
|
125 | 126 | else: |
|
126 | 127 | # handle null values properly to order by NULL last |
|
127 | 128 | if order_by in ['last_activity']: |
|
128 | 129 | sort_col = coalesce(sort_col, datetime.date.min) |
|
129 | 130 | sort_col = sort_col.desc() |
|
130 | 131 | |
|
131 | 132 | base_q = base_q.order_by(sort_col) |
|
132 | 133 | base_q = base_q.offset(start).limit(limit) |
|
133 | 134 | |
|
134 | 135 | fork_list = base_q.all() |
|
135 | 136 | |
|
136 | 137 | def fork_actions(fork): |
|
137 | 138 | url_link = h.route_path( |
|
138 | 139 | 'repo_compare', |
|
139 | 140 | repo_name=fork.repo_name, |
|
140 | 141 | source_ref_type=self.db_repo.landing_rev[0], |
|
141 | 142 | source_ref=self.db_repo.landing_rev[1], |
|
142 | 143 | target_ref_type=self.db_repo.landing_rev[0], |
|
143 | 144 | target_ref=self.db_repo.landing_rev[1], |
|
144 | 145 | _query=dict(merge=1, target_repo=fork.repo_name)) |
|
145 | 146 | return h.link_to(_('Compare fork'), url_link, class_='btn-link') |
|
146 | 147 | |
|
147 | 148 | def fork_name(fork): |
|
148 | 149 | return h.link_to(fork.repo_name, |
|
149 | 150 | h.route_path('repo_summary', repo_name=fork.repo_name)) |
|
150 | 151 | |
|
151 | 152 | forks_data = [] |
|
152 | 153 | for fork in fork_list: |
|
153 | 154 | forks_data.append({ |
|
154 | 155 | "username": h.gravatar_with_user(self.request, fork.user.username), |
|
155 | 156 | "fork_name": fork_name(fork), |
|
156 | 157 | "description": fork.description, |
|
157 | 158 | "fork_date": h.age_component(fork.created_on, time_is_local=True), |
|
158 | 159 | "last_activity": h.format_date(fork.updated_on), |
|
159 | 160 | "action": fork_actions(fork), |
|
160 | 161 | }) |
|
161 | 162 | |
|
162 | 163 | data = ({ |
|
163 | 164 | 'draw': draw, |
|
164 | 165 | 'data': forks_data, |
|
165 | 166 | 'recordsTotal': forks_data_total_count, |
|
166 | 167 | 'recordsFiltered': forks_data_total_filtered_count, |
|
167 | 168 | }) |
|
168 | 169 | |
|
169 | 170 | return data |
|
170 | 171 | |
|
171 | 172 | @LoginRequired() |
|
172 | 173 | @NotAnonymous() |
|
173 | 174 | @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository') |
|
174 | 175 | @HasRepoPermissionAnyDecorator( |
|
175 | 176 | 'repository.read', 'repository.write', 'repository.admin') |
|
176 | 177 | @view_config( |
|
177 | 178 | route_name='repo_fork_new', request_method='GET', |
|
178 | 179 | renderer='rhodecode:templates/forks/forks.mako') |
|
179 | 180 | def repo_fork_new(self): |
|
180 | 181 | c = self.load_default_context() |
|
181 | 182 | |
|
182 | 183 | defaults = RepoModel()._get_defaults(self.db_repo_name) |
|
183 | 184 | # alter the description to indicate a fork |
|
184 | 185 | defaults['description'] = ( |
|
185 | 186 | 'fork of repository: %s \n%s' % ( |
|
186 | 187 | defaults['repo_name'], defaults['description'])) |
|
187 | 188 | # add suffix to fork |
|
188 | 189 | defaults['repo_name'] = '%s-fork' % defaults['repo_name'] |
|
189 | 190 | |
|
190 | 191 | data = render('rhodecode:templates/forks/fork.mako', |
|
191 | 192 | self._get_template_context(c), self.request) |
|
192 | 193 | html = formencode.htmlfill.render( |
|
193 | 194 | data, |
|
194 | 195 | defaults=defaults, |
|
195 | 196 | encoding="UTF-8", |
|
196 | 197 | force_defaults=False |
|
197 | 198 | ) |
|
198 | 199 | return Response(html) |
|
199 | 200 | |
|
200 | 201 | @LoginRequired() |
|
201 | 202 | @NotAnonymous() |
|
202 | 203 | @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository') |
|
203 | 204 | @HasRepoPermissionAnyDecorator( |
|
204 | 205 | 'repository.read', 'repository.write', 'repository.admin') |
|
205 | 206 | @CSRFRequired() |
|
206 | 207 | @view_config( |
|
207 | 208 | route_name='repo_fork_create', request_method='POST', |
|
208 | 209 | renderer='rhodecode:templates/forks/fork.mako') |
|
209 | 210 | def repo_fork_create(self): |
|
210 | 211 | _ = self.request.translate |
|
211 | 212 | c = self.load_default_context() |
|
212 | 213 | |
|
213 | 214 | _form = RepoForkForm(self.request.translate, old_data={'repo_type': self.db_repo.repo_type}, |
|
214 | 215 | repo_groups=c.repo_groups_choices, |
|
215 | 216 | landing_revs=c.landing_revs_choices)() |
|
216 | 217 | post_data = dict(self.request.POST) |
|
217 | 218 | |
|
218 | 219 | # forbid injecting other repo by forging a request |
|
219 | 220 | post_data['fork_parent_id'] = self.db_repo.repo_id |
|
220 | 221 | |
|
221 | 222 | form_result = {} |
|
222 | 223 | task_id = None |
|
223 | 224 | try: |
|
224 | 225 | form_result = _form.to_python(post_data) |
|
225 | 226 | # create fork is done sometimes async on celery, db transaction |
|
226 | 227 | # management is handled there. |
|
227 | 228 | task = RepoModel().create_fork( |
|
228 | 229 | form_result, c.rhodecode_user.user_id) |
|
229 | from celery.result import BaseAsyncResult | |
|
230 | if isinstance(task, BaseAsyncResult): | |
|
231 | task_id = task.task_id | |
|
230 | ||
|
231 | task_id = get_task_id(task) | |
|
232 | 232 | except formencode.Invalid as errors: |
|
233 | 233 | c.rhodecode_db_repo = self.db_repo |
|
234 | 234 | |
|
235 | 235 | data = render('rhodecode:templates/forks/fork.mako', |
|
236 | 236 | self._get_template_context(c), self.request) |
|
237 | 237 | html = formencode.htmlfill.render( |
|
238 | 238 | data, |
|
239 | 239 | defaults=errors.value, |
|
240 | 240 | errors=errors.error_dict or {}, |
|
241 | 241 | prefix_error=False, |
|
242 | 242 | encoding="UTF-8", |
|
243 | 243 | force_defaults=False |
|
244 | 244 | ) |
|
245 | 245 | return Response(html) |
|
246 | 246 | except Exception: |
|
247 | 247 | log.exception( |
|
248 | 248 | u'Exception while trying to fork the repository %s', |
|
249 | 249 | self.db_repo_name) |
|
250 | 250 | msg = ( |
|
251 | 251 | _('An error occurred during repository forking %s') % ( |
|
252 | 252 | self.db_repo_name, )) |
|
253 | 253 | h.flash(msg, category='error') |
|
254 | 254 | |
|
255 | 255 | repo_name = form_result.get('repo_name_full', self.db_repo_name) |
|
256 | 256 | raise HTTPFound( |
|
257 | 257 | h.route_path('repo_creating', |
|
258 | 258 | repo_name=repo_name, |
|
259 | 259 | _query=dict(task_id=task_id))) |
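Both creation paths (new repository and fork) end identically: a redirect to `repo_creating` carrying the task id, after which the page polls `repo_creating_check` until the repository reaches `STATE_CREATED`. A hedged sketch of that polling loop as a script; the URL layout is an assumption, and the real page drives this from JavaScript:

.. code-block:: python

    # Hedged sketch of polling the creation-check endpoint from a script.
    # The URL layout is an assumption for illustration.
    import time
    import requests

    def wait_for_repo(base_url, repo_name, task_id, interval=2.0, tries=60):
        check_url = '%s/%s/repo_creating_check' % (base_url, repo_name)
        for _ in range(tries):
            data = requests.get(check_url, params={'task_id': task_id}).json()
            if data.get('result'):
                return True   # repo reached STATE_CREATED
            time.sleep(interval)
        return False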
@@ -1,106 +1,90 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import os |
|
23 | 23 | import logging |
|
24 | 24 | import rhodecode |
|
25 | 25 | |
|
26 | # ------------------------------------------------------------------------------ | |
|
27 | # CELERY magic until refactor - issue #4163 - import order matters here: | |
|
28 | #from rhodecode.lib import celerypylons # this must be first, celerypylons | |
|
29 | # sets config settings upon import | |
|
30 | ||
|
31 | import rhodecode.integrations # any modules using celery task | |
|
32 | # decorators should be added afterwards: | |
|
33 | # ------------------------------------------------------------------------------ | |
|
34 | 26 | |
|
35 | 27 | from rhodecode.config import utils |
|
36 | 28 | |
|
37 | 29 | from rhodecode.lib.utils import load_rcextensions |
|
38 | 30 | from rhodecode.lib.utils2 import str2bool |
|
39 | 31 | from rhodecode.lib.vcs import connect_vcs, start_vcs_server |
|
40 | 32 | |
|
41 | 33 | log = logging.getLogger(__name__) |
|
42 | 34 | |
|
43 | 35 | |
|
44 | 36 | def load_pyramid_environment(global_config, settings): |
|
45 | 37 | # Some parts of the code expect a merge of global and app settings. |
|
46 | 38 | settings_merged = global_config.copy() |
|
47 | 39 | settings_merged.update(settings) |
|
48 | 40 | |
|
49 | 41 | # TODO(marcink): probably not required anymore |
|
50 | 42 | # configure channelstream, |
|
51 | 43 | settings_merged['channelstream_config'] = { |
|
52 | 44 | 'enabled': str2bool(settings_merged.get('channelstream.enabled', False)), |
|
53 | 45 | 'server': settings_merged.get('channelstream.server'), |
|
54 | 46 | 'secret': settings_merged.get('channelstream.secret') |
|
55 | 47 | } |
|
56 | 48 | |
|
57 | ||
|
58 | # TODO(marcink): celery | |
|
59 | # # store some globals into rhodecode | |
|
60 | # rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery')) | |
|
61 | # rhodecode.CELERY_EAGER = str2bool( | |
|
62 | # config['app_conf'].get('celery.always.eager')) | |
|
63 | ||
|
64 | ||
|
65 | 49 | # If this is a test run we prepare the test environment like |
|
66 | 50 | # creating a test database, test search index and test repositories. |
|
67 | 51 | # This has to be done before the database connection is initialized. |
|
68 | 52 | if settings['is_test']: |
|
69 | 53 | rhodecode.is_test = True |
|
70 | 54 | rhodecode.disable_error_handler = True |
|
71 | 55 | |
|
72 | 56 | utils.initialize_test_environment(settings_merged) |
|
73 | 57 | |
|
74 | 58 | # Initialize the database connection. |
|
75 | 59 | utils.initialize_database(settings_merged) |
|
76 | 60 | |
|
77 | 61 | load_rcextensions(root_path=settings_merged['here']) |
|
78 | 62 | |
|
79 | 63 | # Limit backends to `vcs.backends` from configuration |
|
80 | 64 | for alias in rhodecode.BACKENDS.keys(): |
|
81 | 65 | if alias not in settings['vcs.backends']: |
|
82 | 66 | del rhodecode.BACKENDS[alias] |
|
83 | 67 | log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys()) |
|
84 | 68 | |
|
85 | 69 | # initialize vcs client and optionally run the server if enabled |
|
86 | 70 | vcs_server_uri = settings['vcs.server'] |
|
87 | 71 | vcs_server_enabled = settings['vcs.server.enable'] |
|
88 | 72 | start_server = ( |
|
89 | 73 | settings['vcs.start_server'] and |
|
90 | 74 | not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0'))) |
|
91 | 75 | |
|
92 | 76 | if vcs_server_enabled and start_server: |
|
93 | 77 | log.info("Starting vcsserver") |
|
94 | 78 | start_vcs_server(server_and_port=vcs_server_uri, |
|
95 | 79 | protocol=utils.get_vcs_server_protocol(settings), |
|
96 | 80 | log_level=settings['vcs.server.log_level']) |
|
97 | 81 | |
|
98 | 82 | utils.configure_vcs(settings) |
|
99 | 83 | |
|
100 | 84 | # Store the settings to make them available to other modules. |
|
101 | 85 | |
|
102 | 86 | rhodecode.PYRAMID_SETTINGS = settings_merged |
|
103 | 87 | rhodecode.CONFIG = settings_merged |
|
104 | 88 | |
|
105 | 89 | if vcs_server_enabled: |
|
106 | 90 | connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings)) |
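The commented-out block removed above used to set the celery globals from the ini file; with the loader-based setup that responsibility moved out of the environment bootstrap, but conceptually it is still two ini-driven booleans. A hedged sketch of that step, reconstructed from the removed comment rather than from the new code:

.. code-block:: python

    # Hedged sketch reconstructed from the removed comment block.
    # str2bool mirrors rhodecode.lib.utils2.str2bool in spirit.
    def str2bool(value):
        return str(value).strip().lower() in ('true', 'yes', 'on', 'y', '1')

    def configure_celery_flags(rhodecode_module, app_conf):
        rhodecode_module.CELERY_ENABLED = str2bool(app_conf.get('use_celery'))
        rhodecode_module.CELERY_EAGER = str2bool(
            app_conf.get('celery.always.eager'))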
@@ -1,379 +1,379 b'' | |||
|
1 | 1 | { |
|
2 | 2 | "libnghttp2-1.7.1": { |
|
3 | 3 | "MIT License": "http://spdx.org/licenses/MIT" |
|
4 | 4 | }, |
|
5 | 5 | "nodejs-4.3.1": { |
|
6 | 6 | "MIT License": "http://spdx.org/licenses/MIT" |
|
7 | 7 | }, |
|
8 | 8 | "python-2.7.12": { |
|
9 | 9 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
10 | 10 | }, |
|
11 | 11 | "python2.7-Babel-1.3": { |
|
12 | 12 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
13 | 13 | }, |
|
14 | 14 | "python2.7-Beaker-1.7.0": { |
|
15 | 15 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
16 | 16 | }, |
|
17 | 17 | "python2.7-Chameleon-2.24": { |
|
18 | 18 | "BSD-like": "http://repoze.org/license.html" |
|
19 | 19 | }, |
|
20 | 20 | "python2.7-FormEncode-1.2.4": { |
|
21 | 21 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
22 | 22 | }, |
|
23 | 23 | "python2.7-Jinja2-2.7.3": { |
|
24 | 24 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
25 | 25 | }, |
|
26 | 26 | "python2.7-Mako-1.0.6": { |
|
27 | 27 | "MIT License": "http://spdx.org/licenses/MIT" |
|
28 | 28 | }, |
|
29 | 29 | "python2.7-Markdown-2.6.7": { |
|
30 | 30 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
31 | 31 | }, |
|
32 | 32 | "python2.7-MarkupSafe-0.23": { |
|
33 | 33 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
34 | 34 | }, |
|
35 | 35 | "python2.7-Paste-2.0.3": { |
|
36 | 36 | "MIT License": "http://spdx.org/licenses/MIT" |
|
37 | 37 | }, |
|
38 | 38 | "python2.7-PasteDeploy-1.5.2": { |
|
39 | 39 | "MIT License": "http://spdx.org/licenses/MIT" |
|
40 | 40 | }, |
|
41 | 41 | "python2.7-PasteScript-1.7.5": { |
|
42 | 42 | "MIT License": "http://spdx.org/licenses/MIT" |
|
43 | 43 | }, |
|
44 | 44 | "python2.7-Pygments-2.2.0": { |
|
45 | 45 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
46 | 46 | }, |
|
47 | 47 | "python2.7-Routes-1.13": { |
|
48 | 48 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
49 | 49 | }, |
|
50 | 50 | "python2.7-SQLAlchemy-0.9.9": { |
|
51 | 51 | "MIT License": "http://spdx.org/licenses/MIT" |
|
52 | 52 | }, |
|
53 | 53 | "python2.7-Tempita-0.5.2": { |
|
54 | 54 | "MIT License": "http://spdx.org/licenses/MIT" |
|
55 | 55 | }, |
|
56 | 56 | "python2.7-URLObject-2.4.0": { |
|
57 | 57 | "The Unlicense": "http://unlicense.org/" |
|
58 | 58 | }, |
|
59 | 59 | "python2.7-WebError-0.10.3": { |
|
60 | 60 | "MIT License": "http://spdx.org/licenses/MIT" |
|
61 | 61 | }, |
|
62 | 62 | "python2.7-WebHelpers-1.3": { |
|
63 | 63 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
64 | 64 | }, |
|
65 | 65 | "python2.7-WebHelpers2-2.0": { |
|
66 | 66 | "MIT License": "http://spdx.org/licenses/MIT" |
|
67 | 67 | }, |
|
68 | 68 | "python2.7-WebOb-1.3.1": { |
|
69 | 69 | "MIT License": "http://spdx.org/licenses/MIT" |
|
70 | 70 | }, |
|
71 | 71 | "python2.7-Whoosh-2.7.4": { |
|
72 | 72 | "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause", |
|
73 | 73 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
74 | 74 | }, |
|
75 | 75 | "python2.7-alembic-0.8.4": { |
|
76 | 76 | "MIT License": "http://spdx.org/licenses/MIT" |
|
77 | 77 | }, |
|
78 | 78 | "python2.7-amqplib-1.0.2": { |
|
79 | 79 | "GNU Lesser General Public License v3.0 only": "http://spdx.org/licenses/LGPL-3.0" |
|
80 | 80 | }, |
|
81 | 81 | "python2.7-appenlight-client-0.6.14": { |
|
82 | 82 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
83 | 83 | }, |
|
84 | 84 | "python2.7-authomatic-0.1.0.post1": { |
|
85 | 85 | "MIT License": "http://spdx.org/licenses/MIT" |
|
86 | 86 | }, |
|
87 | 87 | "python2.7-backports.shutil-get-terminal-size-1.0.0": { |
|
88 | 88 | "MIT License": "http://spdx.org/licenses/MIT" |
|
89 | 89 | }, |
|
90 | 90 | "python2.7-bleach-1.5.0": { |
|
91 | 91 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
92 | 92 | }, |
|
93 | 93 | "python2.7-celery-2.2.10": { |
|
94 | 94 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
95 | 95 | }, |
|
96 | 96 | "python2.7-channelstream-0.5.2": { |
|
97 | 97 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
98 | 98 | }, |
|
99 | 99 | "python2.7-click-5.1": { |
|
100 | 100 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
101 | 101 | }, |
|
102 | 102 | "python2.7-colander-1.2": { |
|
103 | 103 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
104 | 104 | }, |
|
105 | 105 | "python2.7-configobj-5.0.6": { |
|
106 | 106 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
107 | 107 | }, |
|
108 | 108 | "python2.7-configparser-3.5.0": { |
|
109 | 109 | "MIT License": "http://spdx.org/licenses/MIT" |
|
110 | 110 | }, |
|
111 | 111 | "python2.7-cssselect-1.0.1": { |
|
112 | 112 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
113 | 113 | }, |
|
114 | 114 | "python2.7-decorator-4.0.11": { |
|
115 | 115 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
116 | 116 | }, |
|
117 | 117 | "python2.7-deform-2.0a2": { |
|
118 | 118 | "BSD-derived": "http://www.repoze.org/LICENSE.txt" |
|
119 | 119 | }, |
|
120 | 120 | "python2.7-docutils-0.12": { |
|
121 | 121 | "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause" |
|
122 | 122 | }, |
|
123 | 123 | "python2.7-dogpile.cache-0.6.1": { |
|
124 | 124 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
125 | 125 | }, |
|
126 | 126 | "python2.7-dogpile.core-0.4.1": { |
|
127 | 127 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
128 | 128 | }, |
|
129 | 129 | "python2.7-elasticsearch-2.3.0": { |
|
130 | 130 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
131 | 131 | }, |
|
132 | 132 | "python2.7-elasticsearch-dsl-2.2.0": { |
|
133 | 133 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
134 | 134 | }, |
|
135 | 135 | "python2.7-entrypoints-0.2.2": { |
|
136 | 136 | "MIT License": "http://spdx.org/licenses/MIT" |
|
137 | 137 | }, |
|
138 | 138 | "python2.7-enum34-1.1.6": { |
|
139 | 139 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
140 | 140 | }, |
|
141 | 141 | "python2.7-functools32-3.2.3.post2": { |
|
142 | 142 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
143 | 143 | }, |
|
144 | 144 | "python2.7-future-0.14.3": { |
|
145 | 145 | "MIT License": "http://spdx.org/licenses/MIT" |
|
146 | 146 | }, |
|
147 | 147 | "python2.7-futures-3.0.2": { |
|
148 | 148 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
149 | 149 | }, |
|
150 | 150 | "python2.7-gevent-1.1.2": { |
|
151 | 151 | "MIT License": "http://spdx.org/licenses/MIT" |
|
152 | 152 | }, |
|
153 | 153 | "python2.7-gnureadline-6.3.3": { |
|
154 | 154 | "GNU General Public License v1.0 only": "http://spdx.org/licenses/GPL-1.0" |
|
155 | 155 | }, |
|
156 | 156 | "python2.7-gprof2dot-2016.10.13": { |
|
157 | 157 | "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+" |
|
158 | 158 | }, |
|
159 | 159 | "python2.7-greenlet-0.4.10": { |
|
160 | 160 | "MIT License": "http://spdx.org/licenses/MIT" |
|
161 | 161 | }, |
|
162 | 162 | "python2.7-gunicorn-19.6.0": { |
|
163 | 163 | "MIT License": "http://spdx.org/licenses/MIT" |
|
164 | 164 | }, |
|
165 | 165 | "python2.7-html5lib-0.9999999": { |
|
166 | 166 | "MIT License": "http://spdx.org/licenses/MIT" |
|
167 | 167 | }, |
|
168 | 168 | "python2.7-infrae.cache-1.0.1": { |
|
169 | 169 | "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1" |
|
170 | 170 | }, |
|
171 | 171 | "python2.7-ipython-5.1.0": { |
|
172 | 172 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
173 | 173 | }, |
|
174 | 174 | "python2.7-ipython-genutils-0.2.0": { |
|
175 | 175 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
176 | 176 | }, |
|
177 | 177 | "python2.7-iso8601-0.1.11": { |
|
178 | 178 | "MIT License": "http://spdx.org/licenses/MIT" |
|
179 | 179 | }, |
|
180 | 180 | "python2.7-itsdangerous-0.24": { |
|
181 | 181 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
182 | 182 | }, |
|
183 | 183 | "python2.7-jsonschema-2.6.0": { |
|
184 | 184 | "MIT License": "http://spdx.org/licenses/MIT" |
|
185 | 185 | }, |
|
186 | 186 | "python2.7-jupyter-client-5.0.0": { |
|
187 | 187 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
188 | 188 | }, |
|
189 | 189 | "python2.7-jupyter-core-4.3.0": { |
|
190 | 190 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
191 | 191 | }, |
|
192 | "python2.7-kombu- | |
|
192 | "python2.7-kombu-4.1.0": { | |
|
193 | 193 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
194 | 194 | }, |
|
195 | 195 | "python2.7-mistune-0.7.4": { |
|
196 | 196 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
197 | 197 | }, |
|
198 | 198 | "python2.7-msgpack-python-0.4.8": { |
|
199 | 199 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
200 | 200 | }, |
|
201 | 201 | "python2.7-nbconvert-5.1.1": { |
|
202 | 202 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
203 | 203 | }, |
|
204 | 204 | "python2.7-nbformat-4.3.0": { |
|
205 | 205 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
206 | 206 | }, |
|
207 | 207 | "python2.7-packaging-15.2": { |
|
208 | 208 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
209 | 209 | }, |
|
210 | 210 | "python2.7-pandocfilters-1.4.1": { |
|
211 | 211 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
212 | 212 | }, |
|
213 | 213 | "python2.7-pathlib2-2.1.0": { |
|
214 | 214 | "MIT License": "http://spdx.org/licenses/MIT" |
|
215 | 215 | }, |
|
216 | 216 | "python2.7-peppercorn-0.5": { |
|
217 | 217 | "BSD-derived": "http://www.repoze.org/LICENSE.txt" |
|
218 | 218 | }, |
|
219 | 219 | "python2.7-pexpect-4.2.1": { |
|
220 | 220 | "ISC License": "http://spdx.org/licenses/ISC" |
|
221 | 221 | }, |
|
222 | 222 | "python2.7-pickleshare-0.7.4": { |
|
223 | 223 | "MIT License": "http://spdx.org/licenses/MIT" |
|
224 | 224 | }, |
|
225 | 225 | "python2.7-prompt-toolkit-1.0.14": { |
|
226 | 226 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
227 | 227 | }, |
|
228 | 228 | "python2.7-psutil-4.3.1": { |
|
229 | 229 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
230 | 230 | }, |
|
231 | 231 | "python2.7-psycopg2-2.6.1": { |
|
232 | 232 | "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+" |
|
233 | 233 | }, |
|
234 | 234 | "python2.7-ptyprocess-0.5.1": { |
|
235 | 235 | "ISC License": "http://opensource.org/licenses/ISC" |
|
236 | 236 | }, |
|
237 | 237 | "python2.7-py-1.4.31": { |
|
238 | 238 | "MIT License": "http://spdx.org/licenses/MIT" |
|
239 | 239 | }, |
|
240 | 240 | "python2.7-py-bcrypt-0.4": { |
|
241 | 241 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
242 | 242 | }, |
|
243 | 243 | "python2.7-py-gfm-0.1.3.rhodecode-upstream1": { |
|
244 | 244 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
245 | 245 | }, |
|
246 | 246 | "python2.7-pycrypto-2.6.1": { |
|
247 | 247 | "Public Domain": null |
|
248 | 248 | }, |
|
249 | 249 | "python2.7-pycurl-7.19.5": { |
|
250 | 250 | "MIT License": "http://spdx.org/licenses/MIT" |
|
251 | 251 | }, |
|
252 | 252 | "python2.7-pygments-markdown-lexer-0.1.0.dev39": { |
|
253 | 253 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
254 | 254 | }, |
|
255 | 255 | "python2.7-pyparsing-1.5.7": { |
|
256 | 256 | "MIT License": "http://spdx.org/licenses/MIT" |
|
257 | 257 | }, |
|
258 | 258 | "python2.7-pyramid-1.7.4": { |
|
259 | 259 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
260 | 260 | }, |
|
261 | 261 | "python2.7-pyramid-beaker-0.8": { |
|
262 | 262 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
263 | 263 | }, |
|
264 | 264 | "python2.7-pyramid-debugtoolbar-3.0.5": { |
|
265 | 265 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause", |
|
266 | 266 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
267 | 267 | }, |
|
268 | 268 | "python2.7-pyramid-jinja2-2.5": { |
|
269 | 269 | "BSD-derived": "http://www.repoze.org/LICENSE.txt" |
|
270 | 270 | }, |
|
271 | 271 | "python2.7-pyramid-mako-1.0.2": { |
|
272 | 272 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
273 | 273 | }, |
|
274 | 274 | "python2.7-pysqlite-2.6.3": { |
|
275 | 275 | "libpng License": "http://spdx.org/licenses/Libpng", |
|
276 | 276 | "zlib License": "http://spdx.org/licenses/Zlib" |
|
277 | 277 | }, |
|
278 | 278 | "python2.7-pytest-3.0.5": { |
|
279 | 279 | "MIT License": "http://spdx.org/licenses/MIT" |
|
280 | 280 | }, |
|
281 | 281 | "python2.7-pytest-profiling-1.2.2": { |
|
282 | 282 | "MIT License": "http://spdx.org/licenses/MIT" |
|
283 | 283 | }, |
|
284 | 284 | "python2.7-pytest-runner-2.9": { |
|
285 | 285 | "MIT License": "http://spdx.org/licenses/MIT" |
|
286 | 286 | }, |
|
287 | 287 | "python2.7-pytest-sugar-0.7.1": { |
|
288 | 288 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
289 | 289 | }, |
|
290 | 290 | "python2.7-pytest-timeout-1.2.0": { |
|
291 | 291 | "MIT License": "http://spdx.org/licenses/MIT" |
|
292 | 292 | }, |
|
293 | 293 | "python2.7-python-dateutil-2.1": { |
|
294 | 294 | "Simplified BSD": null |
|
295 | 295 | }, |
|
296 | 296 | "python2.7-python-editor-1.0.3": { |
|
297 | 297 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
298 | 298 | }, |
|
299 | 299 | "python2.7-python-ldap-2.4.19": { |
|
300 | 300 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
301 | 301 | }, |
|
302 | 302 | "python2.7-python-memcached-1.57": { |
|
303 | 303 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
304 | 304 | }, |
|
305 | 305 | "python2.7-pytz-2015.4": { |
|
306 | 306 | "MIT License": "http://spdx.org/licenses/MIT" |
|
307 | 307 | }, |
|
308 | 308 | "python2.7-pyzmq-14.6.0": { |
|
309 | 309 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
310 | 310 | }, |
|
311 | 311 | "python2.7-recaptcha-client-1.0.6": { |
|
312 | 312 | "MIT License": "http://spdx.org/licenses/MIT" |
|
313 | 313 | }, |
|
314 | 314 | "python2.7-repoze.lru-0.6": { |
|
315 | 315 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
316 | 316 | }, |
|
317 | 317 | "python2.7-requests-2.9.1": { |
|
318 | 318 | "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0" |
|
319 | 319 | }, |
|
320 | 320 | "python2.7-setuptools-19.4": { |
|
321 | 321 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0", |
|
322 | 322 | "Zope Public License 2.0": "http://spdx.org/licenses/ZPL-2.0" |
|
323 | 323 | }, |
|
324 | 324 | "python2.7-setuptools-scm-1.15.6": { |
|
325 | 325 | "MIT License": "http://spdx.org/licenses/MIT" |
|
326 | 326 | }, |
|
327 | 327 | "python2.7-simplegeneric-0.8.1": { |
|
328 | 328 | "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1" |
|
329 | 329 | }, |
|
330 | 330 | "python2.7-simplejson-3.7.2": { |
|
331 | 331 | "MIT License": "http://spdx.org/licenses/MIT" |
|
332 | 332 | }, |
|
333 | 333 | "python2.7-six-1.9.0": { |
|
334 | 334 | "MIT License": "http://spdx.org/licenses/MIT" |
|
335 | 335 | }, |
|
336 | 336 | "python2.7-subprocess32-3.2.6": { |
|
337 | 337 | "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0" |
|
338 | 338 | }, |
|
339 | 339 | "python2.7-termcolor-1.1.0": { |
|
340 | 340 | "MIT License": "http://spdx.org/licenses/MIT" |
|
341 | 341 | }, |
|
342 | 342 | "python2.7-testpath-0.1": { |
|
343 | 343 | "MIT License": "http://spdx.org/licenses/MIT" |
|
344 | 344 | }, |
|
345 | 345 | "python2.7-traitlets-4.3.2": { |
|
346 | 346 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
347 | 347 | }, |
|
348 | 348 | "python2.7-translationstring-1.3": { |
|
349 | 349 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
350 | 350 | }, |
|
351 | 351 | "python2.7-urllib3-1.16": { |
|
352 | 352 | "MIT License": "http://spdx.org/licenses/MIT" |
|
353 | 353 | }, |
|
354 | 354 | "python2.7-venusian-1.0": { |
|
355 | 355 | "Repoze License": "http://www.repoze.org/LICENSE.txt" |
|
356 | 356 | }, |
|
357 | 357 | "python2.7-waitress-1.0.1": { |
|
358 | 358 | "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1" |
|
359 | 359 | }, |
|
360 | 360 | "python2.7-wcwidth-0.1.7": { |
|
361 | 361 | "MIT License": "http://spdx.org/licenses/MIT" |
|
362 | 362 | }, |
|
363 | 363 | "python2.7-ws4py-0.3.5": { |
|
364 | 364 | "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause" |
|
365 | 365 | }, |
|
366 | 366 | "python2.7-zope.cachedescriptors-4.0.0": { |
|
367 | 367 | "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1" |
|
368 | 368 | }, |
|
369 | 369 | "python2.7-zope.deprecation-4.1.2": { |
|
370 | 370 | "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1" |
|
371 | 371 | }, |
|
372 | 372 | "python2.7-zope.interface-4.1.3": { |
|
373 | 373 | "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1" |
|
374 | 374 | }, |
|
375 | 375 | "xz-5.2.2": { |
|
376 | 376 | "GNU General Public License v2.0 or later": "http://spdx.org/licenses/GPL-2.0+", |
|
377 | 377 | "GNU Library General Public License v2.1 or later": "http://spdx.org/licenses/LGPL-2.1+" |
|
378 | 378 | } |
|
379 | 379 | } No newline at end of file |
@@ -1,431 +1,436 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import traceback |
|
23 | 23 | import collections |
|
24 | 24 | |
|
25 | 25 | from paste.gzipper import make_gzip_middleware |
|
26 | 26 | from pyramid.wsgi import wsgiapp |
|
27 | 27 | from pyramid.authorization import ACLAuthorizationPolicy |
|
28 | 28 | from pyramid.config import Configurator |
|
29 | 29 | from pyramid.settings import asbool, aslist |
|
30 | 30 | from pyramid.httpexceptions import ( |
|
31 | 31 | HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound) |
|
32 | 32 | from pyramid.events import ApplicationCreated |
|
33 | 33 | from pyramid.renderers import render_to_response |
|
34 | 34 | |
|
35 | 35 | from rhodecode.model import meta |
|
36 | 36 | from rhodecode.config import patches |
|
37 | 37 | from rhodecode.config import utils as config_utils |
|
38 | 38 | from rhodecode.config.environment import load_pyramid_environment |
|
39 | 39 | |
|
40 | 40 | from rhodecode.lib.middleware.vcs import VCSMiddleware |
|
41 | 41 | from rhodecode.lib.vcs import VCSCommunicationError |
|
42 | 42 | from rhodecode.lib.exceptions import VCSServerUnavailable |
|
43 | 43 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled |
|
44 | 44 | from rhodecode.lib.middleware.https_fixup import HttpsFixup |
|
45 | from rhodecode.lib.celerylib.loader import configure_celery | |
|
45 | 46 | from rhodecode.lib.plugins.utils import register_rhodecode_plugin |
|
46 | 47 | from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict |
|
47 | 48 | from rhodecode.subscribers import ( |
|
48 | 49 | scan_repositories_if_enabled, write_js_routes_if_enabled, |
|
49 | 50 | write_metadata_if_needed, inject_app_settings) |
|
50 | 51 | |
|
51 | 52 | |
|
52 | 53 | log = logging.getLogger(__name__) |
|
53 | 54 | |
|
54 | 55 | |
|
55 | 56 | def is_http_error(response): |
|
56 | 57 | # error which should have traceback |
|
57 | 58 | return response.status_code > 499 |
|
58 | 59 | |
|
59 | 60 | |
|
60 | 61 | def make_pyramid_app(global_config, **settings): |
|
61 | 62 | """ |
|
62 | 63 | Constructs the WSGI application based on Pyramid. |
|
63 | 64 | |
|
64 | 65 | Specials: |
|
65 | 66 | |
|
66 | 67 | * The application can also be integrated like a plugin via the call to

67 | 68 | `includeme`, together with the other utility functions it invokes.

68 | 69 | Changing this should be done with great care, so as not to break

69 | 70 | cases where these fragments are assembled from another place.
|
70 | 71 | |
|
71 | 72 | """ |
|
72 | 73 | sanitize_settings_and_apply_defaults(settings) |
|
73 | 74 | |
|
74 | 75 | config = Configurator(settings=settings) |
|
75 | 76 | |
|
76 | 77 | # Apply compatibility patches |
|
77 | 78 | patches.inspect_getargspec() |
|
78 | 79 | |
|
79 | 80 | load_pyramid_environment(global_config, settings) |
|
80 | 81 | |
|
81 | 82 | # Static file view comes first |
|
82 | 83 | includeme_first(config) |
|
83 | 84 | |
|
84 | 85 | includeme(config) |
|
85 | 86 | |
|
86 | 87 | pyramid_app = config.make_wsgi_app() |
|
87 | 88 | pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config) |
|
88 | 89 | pyramid_app.config = config |
|
89 | 90 | |
|
91 | config.configure_celery(global_config['__file__']) | |
|
90 | 92 | # creating the app uses a connection - return it after we are done |
|
91 | 93 | meta.Session.remove() |
|
92 | 94 | |
|
95 | log.info('Pyramid app %s created and configured.', pyramid_app) | |
|
93 | 96 | return pyramid_app |
|
94 | 97 | |
|
95 | 98 | |
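The two added lines above wire Celery into app creation: `includeme` (further down) registers `configure_celery` as a Pyramid config directive, and `make_pyramid_app` invokes it with the ini path once the WSGI app exists. A minimal sketch of what such a directive could look like, assuming the real implementation in rhodecode.lib.celerylib.loader loads the ini and flips the global flag; the details may differ:

import rhodecode
from rhodecode.lib.celerylib.utils import get_ini_config

def configure_celery(config, ini_location):
    # A Pyramid directive receives the Configurator as its first argument.
    # Sketch: read the celery settings from the ini file and mark celery
    # as available so run_task() can dispatch asynchronously.
    celery_settings = get_ini_config(ini_location)
    rhodecode.CELERY_ENABLED = bool(celery_settings)

# registration (in includeme) and invocation (in make_pyramid_app):
#   config.add_directive('configure_celery', configure_celery)
#   config.configure_celery(global_config['__file__'])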
|
96 | 99 | def not_found_view(request): |
|
97 | 100 | """ |
|
98 | 101 | This creates the view which should be registered as not-found-view to |
|
99 | 102 | pyramid. |
|
100 | 103 | """ |
|
101 | 104 | |
|
102 | 105 | if not getattr(request, 'vcs_call', None): |
|
103 | 106 | # handle like regular case with our error_handler |
|
104 | 107 | return error_handler(HTTPNotFound(), request) |
|
105 | 108 | |
|
106 | 109 | # handle not found view as a vcs call |
|
107 | 110 | settings = request.registry.settings |
|
108 | 111 | ae_client = getattr(request, 'ae_client', None) |
|
109 | 112 | vcs_app = VCSMiddleware( |
|
110 | 113 | HTTPNotFound(), request.registry, settings, |
|
111 | 114 | appenlight_client=ae_client) |
|
112 | 115 | |
|
113 | 116 | return wsgiapp(vcs_app)(None, request) |
|
114 | 117 | |
|
115 | 118 | |
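`not_found_view` reuses pyramid's `wsgiapp` wrapper to hand unresolved requests to the VCS middleware. The same pattern in isolation, with a toy WSGI app (names here are illustrative only):

from pyramid.wsgi import wsgiapp

def toy_vcs_app(environ, start_response):
    # stand-in for VCSMiddleware: any plain WSGI callable works
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'served by the wsgi fallback']

# wsgiapp() adapts a WSGI callable into a Pyramid view taking
# (context, request), which is exactly how not_found_view calls it.
fallback_view = wsgiapp(toy_vcs_app)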
|
116 | 119 | def error_handler(exception, request): |
|
117 | 120 | import rhodecode |
|
118 | 121 | from rhodecode.lib import helpers |
|
119 | 122 | |
|
120 | 123 | rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode' |
|
121 | 124 | |
|
122 | 125 | base_response = HTTPInternalServerError() |
|
123 | 126 | # prefer original exception for the response since it may have headers set |
|
124 | 127 | if isinstance(exception, HTTPException): |
|
125 | 128 | base_response = exception |
|
126 | 129 | elif isinstance(exception, VCSCommunicationError): |
|
127 | 130 | base_response = VCSServerUnavailable() |
|
128 | 131 | |
|
129 | 132 | if is_http_error(base_response): |
|
130 | 133 | log.exception( |
|
131 | 134 | 'error occurred handling this request for path: %s', request.path) |
|
132 | 135 | |
|
133 | 136 | error_explanation = base_response.explanation or str(base_response) |
|
134 | 137 | if base_response.status_code == 404: |
|
135 | 138 | error_explanation += " Or you don't have permission to access it." |
|
136 | 139 | c = AttributeDict() |
|
137 | 140 | c.error_message = base_response.status |
|
138 | 141 | c.error_explanation = error_explanation |
|
139 | 142 | c.visual = AttributeDict() |
|
140 | 143 | |
|
141 | 144 | c.visual.rhodecode_support_url = ( |
|
142 | 145 | request.registry.settings.get('rhodecode_support_url') or |
|
143 | 146 | request.route_url('rhodecode_support') |
|
144 | 147 | ) |
|
145 | 148 | c.redirect_time = 0 |
|
146 | 149 | c.rhodecode_name = rhodecode_title |
|
147 | 150 | if not c.rhodecode_name: |
|
148 | 151 | c.rhodecode_name = 'Rhodecode' |
|
149 | 152 | |
|
150 | 153 | c.causes = [] |
|
151 | 154 | if is_http_error(base_response): |
|
152 | 155 | c.causes.append('Server is overloaded.') |
|
153 | 156 | c.causes.append('Server database connection is lost.') |
|
154 | 157 | c.causes.append('Server experienced an unhandled error.')
|
155 | 158 | |
|
156 | 159 | if hasattr(base_response, 'causes'): |
|
157 | 160 | c.causes = base_response.causes |
|
158 | 161 | |
|
159 | 162 | c.messages = helpers.flash.pop_messages(request=request) |
|
160 | 163 | c.traceback = traceback.format_exc() |
|
161 | 164 | response = render_to_response( |
|
162 | 165 | '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request, |
|
163 | 166 | response=base_response) |
|
164 | 167 | |
|
165 | 168 | return response |
|
166 | 169 | |
|
167 | 170 | |
|
168 | 171 | def includeme_first(config): |
|
169 | 172 | # redirect automatic browser favicon.ico requests to correct place |
|
170 | 173 | def favicon_redirect(context, request): |
|
171 | 174 | return HTTPFound( |
|
172 | 175 | request.static_path('rhodecode:public/images/favicon.ico')) |
|
173 | 176 | |
|
174 | 177 | config.add_view(favicon_redirect, route_name='favicon') |
|
175 | 178 | config.add_route('favicon', '/favicon.ico') |
|
176 | 179 | |
|
177 | 180 | def robots_redirect(context, request): |
|
178 | 181 | return HTTPFound( |
|
179 | 182 | request.static_path('rhodecode:public/robots.txt')) |
|
180 | 183 | |
|
181 | 184 | config.add_view(robots_redirect, route_name='robots') |
|
182 | 185 | config.add_route('robots', '/robots.txt') |
|
183 | 186 | |
|
184 | 187 | config.add_static_view( |
|
185 | 188 | '_static/deform', 'deform:static') |
|
186 | 189 | config.add_static_view( |
|
187 | 190 | '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24) |
|
188 | 191 | |
|
189 | 192 | |
|
190 | 193 | def includeme(config): |
|
191 | 194 | settings = config.registry.settings |
|
192 | 195 | |
|
193 | 196 | # plugin information |
|
194 | 197 | config.registry.rhodecode_plugins = collections.OrderedDict() |
|
195 | 198 | |
|
196 | 199 | config.add_directive( |
|
197 | 200 | 'register_rhodecode_plugin', register_rhodecode_plugin) |
|
198 | 201 | |
|
202 | config.add_directive('configure_celery', configure_celery) | |
|
203 | ||
|
199 | 204 | if asbool(settings.get('appenlight', 'false')): |
|
200 | 205 | config.include('appenlight_client.ext.pyramid_tween') |
|
201 | 206 | |
|
202 | 207 | # Includes which are required. The application would fail without them. |
|
203 | 208 | config.include('pyramid_mako') |
|
204 | 209 | config.include('pyramid_beaker') |
|
205 | 210 | |
|
206 | 211 | config.include('rhodecode.authentication') |
|
207 | 212 | config.include('rhodecode.integrations') |
|
208 | 213 | |
|
209 | 214 | # apps |
|
210 | 215 | config.include('rhodecode.apps._base') |
|
211 | 216 | config.include('rhodecode.apps.ops') |
|
212 | 217 | |
|
213 | 218 | config.include('rhodecode.apps.admin') |
|
214 | 219 | config.include('rhodecode.apps.channelstream') |
|
215 | 220 | config.include('rhodecode.apps.login') |
|
216 | 221 | config.include('rhodecode.apps.home') |
|
217 | 222 | config.include('rhodecode.apps.journal') |
|
218 | 223 | config.include('rhodecode.apps.repository') |
|
219 | 224 | config.include('rhodecode.apps.repo_group') |
|
220 | 225 | config.include('rhodecode.apps.user_group') |
|
221 | 226 | config.include('rhodecode.apps.search') |
|
222 | 227 | config.include('rhodecode.apps.user_profile') |
|
223 | 228 | config.include('rhodecode.apps.my_account') |
|
224 | 229 | config.include('rhodecode.apps.svn_support') |
|
225 | 230 | config.include('rhodecode.apps.ssh_support') |
|
226 | 231 | config.include('rhodecode.apps.gist') |
|
227 | 232 | |
|
228 | 233 | config.include('rhodecode.apps.debug_style') |
|
229 | 234 | config.include('rhodecode.tweens') |
|
230 | 235 | config.include('rhodecode.api') |
|
231 | 236 | |
|
232 | 237 | config.add_route( |
|
233 | 238 | 'rhodecode_support', 'https://rhodecode.com/help/', static=True) |
|
234 | 239 | |
|
235 | 240 | config.add_translation_dirs('rhodecode:i18n/') |
|
236 | 241 | settings['default_locale_name'] = settings.get('lang', 'en') |
|
237 | 242 | |
|
238 | 243 | # Add subscribers. |
|
239 | 244 | config.add_subscriber(inject_app_settings, ApplicationCreated) |
|
240 | 245 | config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated) |
|
241 | 246 | config.add_subscriber(write_metadata_if_needed, ApplicationCreated) |
|
242 | 247 | config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated) |
|
243 | 248 | |
|
244 | 249 | # events |
|
245 | 250 | # TODO(marcink): this should be done when pyramid migration is finished |
|
246 | 251 | # config.add_subscriber( |
|
247 | 252 | # 'rhodecode.integrations.integrations_event_handler', |
|
248 | 253 | # 'rhodecode.events.RhodecodeEvent') |
|
249 | 254 | |
|
250 | 255 | # request custom methods |
|
251 | 256 | config.add_request_method( |
|
252 | 257 | 'rhodecode.lib.partial_renderer.get_partial_renderer', |
|
253 | 258 | 'get_partial_renderer') |
|
254 | 259 | |
|
255 | 260 | # Set the authorization policy. |
|
256 | 261 | authz_policy = ACLAuthorizationPolicy() |
|
257 | 262 | config.set_authorization_policy(authz_policy) |
|
258 | 263 | |
|
259 | 264 | # Set the default renderer for HTML templates to mako. |
|
260 | 265 | config.add_mako_renderer('.html') |
|
261 | 266 | |
|
262 | 267 | config.add_renderer( |
|
263 | 268 | name='json_ext', |
|
264 | 269 | factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json') |
|
265 | 270 | |
|
266 | 271 | # include RhodeCode plugins |
|
267 | 272 | includes = aslist(settings.get('rhodecode.includes', [])) |
|
268 | 273 | for inc in includes: |
|
269 | 274 | config.include(inc) |
|
270 | 275 | |
|
271 | 276 | # custom not-found view: if our pyramid app doesn't know how to handle

272 | 277 | # the request, pass it to the potential VCS handling app
|
273 | 278 | config.add_notfound_view(not_found_view) |
|
274 | 279 | if not settings.get('debugtoolbar.enabled', False): |
|
275 | 280 | # with the debugtoolbar disabled, handle all exceptions via the error_handler
|
276 | 281 | config.add_view(error_handler, context=Exception) |
|
277 | 282 | |
|
278 | 283 | # all errors including 403/404/50X |
|
279 | 284 | config.add_view(error_handler, context=HTTPError) |
|
280 | 285 | |
|
281 | 286 | |
|
282 | 287 | def wrap_app_in_wsgi_middlewares(pyramid_app, config): |
|
283 | 288 | """ |
|
284 | 289 | Apply outer WSGI middlewares around the application. |
|
285 | 290 | """ |
|
286 | 291 | settings = config.registry.settings |
|
287 | 292 | |
|
288 | 293 | # enable https redirects based on HTTP_X_URL_SCHEME set by proxy |
|
289 | 294 | pyramid_app = HttpsFixup(pyramid_app, settings) |
|
290 | 295 | |
|
291 | 296 | pyramid_app, _ae_client = wrap_in_appenlight_if_enabled( |
|
292 | 297 | pyramid_app, settings) |
|
293 | 298 | config.registry.ae_client = _ae_client |
|
294 | 299 | |
|
295 | 300 | if settings['gzip_responses']: |
|
296 | 301 | pyramid_app = make_gzip_middleware( |
|
297 | 302 | pyramid_app, settings, compress_level=1) |
|
298 | 303 | |
|
299 | 304 | # this should be the outer most middleware in the wsgi stack since |
|
300 | 305 | # middleware like Routes make database calls |
|
301 | 306 | def pyramid_app_with_cleanup(environ, start_response): |
|
302 | 307 | try: |
|
303 | 308 | return pyramid_app(environ, start_response) |
|
304 | 309 | finally: |
|
305 | 310 | # Dispose current database session and rollback uncommitted |
|
306 | 311 | # transactions. |
|
307 | 312 | meta.Session.remove() |
|
308 | 313 | |
|
309 | 314 | # In a single-threaded server on a non-sqlite db we should see

310 | 315 | # '0 Current Checked out connections' at the end of a request;

311 | 316 | # if not, something somewhere is leaving a connection open
|
312 | 317 | pool = meta.Base.metadata.bind.engine.pool |
|
313 | 318 | log.debug('sa pool status: %s', pool.status()) |
|
314 | 319 | |
|
315 | 320 | return pyramid_app_with_cleanup |
|
316 | 321 | |
|
317 | 322 | |
|
318 | 323 | def sanitize_settings_and_apply_defaults(settings): |
|
319 | 324 | """ |
|
320 | 325 | Applies settings defaults and does all type conversion. |
|
321 | 326 | |
|
322 | 327 | We would move all settings parsing and preparation into this place, so that |
|
323 | 328 | we have only one place left which deals with this part. The remaining parts |
|
324 | 329 | of the application would start to rely fully on well prepared settings. |
|
325 | 330 | |
|
326 | 331 | This piece would later be split up per topic to avoid a big fat monster |
|
327 | 332 | function. |
|
328 | 333 | """ |
|
329 | 334 | |
|
330 | 335 | settings.setdefault('rhodecode.edition', 'Community Edition') |
|
331 | 336 | |
|
332 | 337 | if 'mako.default_filters' not in settings: |
|
333 | 338 | # set custom default filters if we don't have it defined |
|
334 | 339 | settings['mako.imports'] = 'from rhodecode.lib.base import h_filter' |
|
335 | 340 | settings['mako.default_filters'] = 'h_filter' |
|
336 | 341 | |
|
337 | 342 | if 'mako.directories' not in settings: |
|
338 | 343 | mako_directories = settings.setdefault('mako.directories', [ |
|
339 | 344 | # Base templates of the original application |
|
340 | 345 | 'rhodecode:templates', |
|
341 | 346 | ]) |
|
342 | 347 | log.debug( |
|
343 | 348 | "Using the following Mako template directories: %s", |
|
344 | 349 | mako_directories) |
|
345 | 350 | |
|
346 | 351 | # Default includes, possible to change as a user |
|
347 | 352 | pyramid_includes = settings.setdefault('pyramid.includes', [ |
|
348 | 353 | 'rhodecode.lib.middleware.request_wrapper', |
|
349 | 354 | ]) |
|
350 | 355 | log.debug( |
|
351 | 356 | "Using the following pyramid.includes: %s", |
|
352 | 357 | pyramid_includes) |
|
353 | 358 | |
|
354 | 359 | # TODO: johbo: Re-think this, usually the call to config.include |
|
355 | 360 | # should allow to pass in a prefix. |
|
356 | 361 | settings.setdefault('rhodecode.api.url', '/_admin/api') |
|
357 | 362 | |
|
358 | 363 | # Sanitize generic settings. |
|
359 | 364 | _list_setting(settings, 'default_encoding', 'UTF-8') |
|
360 | 365 | _bool_setting(settings, 'is_test', 'false') |
|
361 | 366 | _bool_setting(settings, 'gzip_responses', 'false') |
|
362 | 367 | |
|
363 | 368 | # Call split out functions that sanitize settings for each topic. |
|
364 | 369 | _sanitize_appenlight_settings(settings) |
|
365 | 370 | _sanitize_vcs_settings(settings) |
|
366 | 371 | |
|
367 | 372 | # configure instance id |
|
368 | 373 | config_utils.set_instance_id(settings) |
|
369 | 374 | |
|
370 | 375 | return settings |
|
371 | 376 | |
|
372 | 377 | |
|
373 | 378 | def _sanitize_appenlight_settings(settings): |
|
374 | 379 | _bool_setting(settings, 'appenlight', 'false') |
|
375 | 380 | |
|
376 | 381 | |
|
377 | 382 | def _sanitize_vcs_settings(settings): |
|
378 | 383 | """ |
|
379 | 384 | Applies settings defaults and does type conversion for all VCS related |
|
380 | 385 | settings. |
|
381 | 386 | """ |
|
382 | 387 | _string_setting(settings, 'vcs.svn.compatible_version', '') |
|
383 | 388 | _string_setting(settings, 'git_rev_filter', '--all') |
|
384 | 389 | _string_setting(settings, 'vcs.hooks.protocol', 'http') |
|
385 | 390 | _string_setting(settings, 'vcs.scm_app_implementation', 'http') |
|
386 | 391 | _string_setting(settings, 'vcs.server', '') |
|
387 | 392 | _string_setting(settings, 'vcs.server.log_level', 'debug') |
|
388 | 393 | _string_setting(settings, 'vcs.server.protocol', 'http') |
|
389 | 394 | _bool_setting(settings, 'startup.import_repos', 'false') |
|
390 | 395 | _bool_setting(settings, 'vcs.hooks.direct_calls', 'false') |
|
391 | 396 | _bool_setting(settings, 'vcs.server.enable', 'true') |
|
392 | 397 | _bool_setting(settings, 'vcs.start_server', 'false') |
|
393 | 398 | _list_setting(settings, 'vcs.backends', 'hg, git, svn') |
|
394 | 399 | _int_setting(settings, 'vcs.connection_timeout', 3600) |
|
395 | 400 | |
|
396 | 401 | # Support legacy values of vcs.scm_app_implementation. Legacy |
|
397 | 402 | # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http' |
|
398 | 403 | # which is now mapped to 'http'. |
|
399 | 404 | scm_app_impl = settings['vcs.scm_app_implementation'] |
|
400 | 405 | if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http': |
|
401 | 406 | settings['vcs.scm_app_implementation'] = 'http' |
|
402 | 407 | |
|
403 | 408 | |
|
404 | 409 | def _int_setting(settings, name, default): |
|
405 | 410 | settings[name] = int(settings.get(name, default)) |
|
406 | 411 | |
|
407 | 412 | |
|
408 | 413 | def _bool_setting(settings, name, default): |
|
409 | 414 | input_val = settings.get(name, default) |
|
410 | 415 | if isinstance(input_val, unicode): |
|
411 | 416 | input_val = input_val.encode('utf8') |
|
412 | 417 | settings[name] = asbool(input_val) |
|
413 | 418 | |
|
414 | 419 | |
|
415 | 420 | def _list_setting(settings, name, default): |
|
416 | 421 | raw_value = settings.get(name, default) |
|
417 | 422 | |
|
418 | 423 | old_separator = ',' |
|
419 | 424 | if old_separator in raw_value: |
|
420 | 425 | # If we get a comma separated list, pass it to our own function. |
|
421 | 426 | settings[name] = rhodecode_aslist(raw_value, sep=old_separator) |
|
422 | 427 | else: |
|
423 | 428 | # Otherwise we assume it uses pyramids space/newline separation. |
|
424 | 429 | settings[name] = aslist(raw_value) |
|
425 | 430 | |
|
426 | 431 | |
|
427 | 432 | def _string_setting(settings, name, default, lower=True): |
|
428 | 433 | value = settings.get(name, default) |
|
429 | 434 | if lower: |
|
430 | 435 | value = value.lower() |
|
431 | 436 | settings[name] = value |
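The `_list_setting` helper above supports both separators; a quick illustration with made-up keys (assuming rhodecode's `aslist` strips whitespace around comma-separated items, as its use for 'vcs.backends' implies):

settings = {'vcs.backends': 'hg, git, svn'}
_list_setting(settings, 'vcs.backends', '')
# comma present -> rhodecode_aslist path
assert settings['vcs.backends'] == ['hg', 'git', 'svn']

settings = {'pyramid.includes': 'pkg_a\npkg_b'}
_list_setting(settings, 'pyramid.includes', '')
# no comma -> pyramid's whitespace/newline aslist path
assert settings['pyramid.includes'] == ['pkg_a', 'pkg_b']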
@@ -1,254 +1,252 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | from __future__ import unicode_literals |
|
22 | 22 | import deform |
|
23 | import re | |
|
24 | 23 | import logging |
|
25 | 24 | import requests |
|
26 | 25 | import colander |
|
27 | 26 | import textwrap |
|
28 | from celery.task import task | |
|
29 | 27 | from mako.template import Template |
|
30 | 28 | |
|
31 | 29 | from rhodecode import events |
|
32 | 30 | from rhodecode.translation import _ |
|
33 | 31 | from rhodecode.lib import helpers as h |
|
34 | from rhodecode.lib.celerylib import run_task | |
|
32 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask | |
|
35 | 33 | from rhodecode.lib.colander_utils import strip_whitespace |
|
36 | 34 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
37 | 35 | |
|
38 | 36 | log = logging.getLogger(__name__) |
|
39 | 37 | |
|
40 | 38 | |
|
41 | 39 | class HipchatSettingsSchema(colander.Schema): |
|
42 | 40 | color_choices = [ |
|
43 | 41 | ('yellow', _('Yellow')), |
|
44 | 42 | ('red', _('Red')), |
|
45 | 43 | ('green', _('Green')), |
|
46 | 44 | ('purple', _('Purple')), |
|
47 | 45 | ('gray', _('Gray')), |
|
48 | 46 | ] |
|
49 | 47 | |
|
50 | 48 | server_url = colander.SchemaNode( |
|
51 | 49 | colander.String(), |
|
52 | 50 | title=_('Hipchat server URL'), |
|
53 | 51 | description=_('Hipchat integration url.'), |
|
54 | 52 | default='', |
|
55 | 53 | preparer=strip_whitespace, |
|
56 | 54 | validator=colander.url, |
|
57 | 55 | widget=deform.widget.TextInputWidget( |
|
58 | 56 | placeholder='https://?.hipchat.com/v2/room/?/notification?auth_token=?', |
|
59 | 57 | ), |
|
60 | 58 | ) |
|
61 | 59 | notify = colander.SchemaNode( |
|
62 | 60 | colander.Bool(), |
|
63 | 61 | title=_('Notify'), |
|
64 | 62 | description=_('Make a notification to the users in room.'), |
|
65 | 63 | missing=False, |
|
66 | 64 | default=False, |
|
67 | 65 | ) |
|
68 | 66 | color = colander.SchemaNode( |
|
69 | 67 | colander.String(), |
|
70 | 68 | title=_('Color'), |
|
71 | 69 | description=_('Background color of message.'), |
|
72 | 70 | missing='', |
|
73 | 71 | validator=colander.OneOf([x[0] for x in color_choices]), |
|
74 | 72 | widget=deform.widget.Select2Widget( |
|
75 | 73 | values=color_choices, |
|
76 | 74 | ), |
|
77 | 75 | ) |
|
78 | 76 | |
|
79 | 77 | |
|
80 | 78 | repo_push_template = Template(''' |
|
81 | 79 | <b>${data['actor']['username']}</b> pushed to repo <a href="${data['repo']['url']}">${data['repo']['repo_name']}</a>: |
|
82 | 80 | <br> |
|
83 | 81 | <ul> |
|
84 | 82 | %for branch, branch_commits in branches_commits.items(): |
|
85 | 83 | <li> |
|
86 | 84 | <a href="${branch_commits['branch']['url']}">branch: ${branch_commits['branch']['name']}</a> |
|
87 | 85 | <ul> |
|
88 | 86 | %for commit in branch_commits['commits']: |
|
89 | 87 | <li><a href="${commit['url']}">${commit['short_id']}</a> - ${commit['message_html']}</li> |
|
90 | 88 | %endfor |
|
91 | 89 | </ul> |
|
92 | 90 | </li> |
|
93 | 91 | %endfor |
|
94 | 92 | ''') |
|
95 | 93 | |
|
96 | 94 | |
|
97 | 95 | class HipchatIntegrationType(IntegrationTypeBase): |
|
98 | 96 | key = 'hipchat' |
|
99 | 97 | display_name = _('Hipchat') |
|
100 | 98 | description = _('Send events such as repo pushes and pull requests to ' |
|
101 | 99 | 'your hipchat channel.') |
|
102 | 100 | icon = '''<?xml version="1.0" encoding="utf-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve"><g><g transform="translate(0.000000,511.000000) scale(0.100000,-0.100000)"><path fill="#205281" d="M4197.1,4662.4c-1661.5-260.4-3018-1171.6-3682.6-2473.3C219.9,1613.6,100,1120.3,100,462.6c0-1014,376.8-1918.4,1127-2699.4C2326.7-3377.6,3878.5-3898.3,5701-3730.5l486.5,44.5l208.9-123.3c637.2-373.4,1551.8-640.6,2240.4-650.9c304.9-6.9,335.7,0,417.9,75.4c185,174.7,147.3,411.1-89.1,548.1c-315.2,181.6-620,544.7-733.1,870.1l-51.4,157.6l472.7,472.7c349.4,349.4,520.7,551.5,657.7,774.2c784.5,1281.2,784.5,2788.5,0,4052.6c-236.4,376.8-794.8,966-1178.4,1236.7c-572.1,407.7-1264.1,709.1-1993.7,870.1c-267.2,58.2-479.6,75.4-1038,82.2C4714.4,4686.4,4310.2,4679.6,4197.1,4662.4z M5947.6,3740.9c1856.7-380.3,3127.6-1709.4,3127.6-3275c0-1000.3-534.4-1949.2-1466.2-2600.1c-188.4-133.6-287.8-226.1-301.5-284.4c-41.1-157.6,263.8-938.6,397.4-1020.8c20.5-10.3,34.3-44.5,34.3-75.4c0-167.8-811.9,195.3-1363.4,609.8l-181.6,137l-332.3-58.2c-445.3-78.8-1281.2-78.8-1702.6,0C2796-2569.2,1734.1-1832.6,1220.2-801.5C983.8-318.5,905,51.5,929,613.3c27.4,640.6,243.2,1192.1,685.1,1740.3c620,770.8,1661.5,1305.2,2822.8,1452.5C4806.9,3854,5553.7,3819.7,5947.6,3740.9z"/><path fill="#205281" d="M2381.5-345.9c-75.4-106.2-68.5-167.8,34.3-322c332.3-500.2,1010.6-928.4,1760.8-1120.2c417.9-106.2,1226.4-106.2,1644.3,0c712.5,181.6,1270.9,517.3,1685.4,1014C7681-561.7,7715.3-424.7,7616-325.4c-89.1,89.1-167.9,65.1-431.7-133.6c-835.8-630.3-2028-856.4-3086.5-585.8C3683.3-938.6,3142-685,2830.3-448.7C2576.8-253.4,2463.7-229.4,2381.5-345.9z"/></g></g><!-- Svg Vector Icons : http://www.onlinewebfonts.com/icon --></svg>''' |
|
103 | 101 | valid_events = [ |
|
104 | 102 | events.PullRequestCloseEvent, |
|
105 | 103 | events.PullRequestMergeEvent, |
|
106 | 104 | events.PullRequestUpdateEvent, |
|
107 | 105 | events.PullRequestCommentEvent, |
|
108 | 106 | events.PullRequestReviewEvent, |
|
109 | 107 | events.PullRequestCreateEvent, |
|
110 | 108 | events.RepoPushEvent, |
|
111 | 109 | events.RepoCreateEvent, |
|
112 | 110 | ] |
|
113 | 111 | |
|
114 | 112 | def send_event(self, event): |
|
115 | 113 | if event.__class__ not in self.valid_events: |
|
116 | 114 | log.debug('event not valid: %r' % event) |
|
117 | 115 | return |
|
118 | 116 | |
|
119 | 117 | if event.name not in self.settings['events']: |
|
120 | 118 | log.debug('event ignored: %r' % event) |
|
121 | 119 | return |
|
122 | 120 | |
|
123 | 121 | data = event.as_dict() |
|
124 | 122 | |
|
125 | 123 | text = '<b>%s<b> caused a <b>%s</b> event' % ( |
|
126 | 124 | data['actor']['username'], event.name) |
|
127 | 125 | |
|
128 | 126 | log.debug('handling hipchat event for %s' % event.name) |
|
129 | 127 | |
|
130 | 128 | if isinstance(event, events.PullRequestCommentEvent): |
|
131 | 129 | text = self.format_pull_request_comment_event(event, data) |
|
132 | 130 | elif isinstance(event, events.PullRequestReviewEvent): |
|
133 | 131 | text = self.format_pull_request_review_event(event, data) |
|
134 | 132 | elif isinstance(event, events.PullRequestEvent): |
|
135 | 133 | text = self.format_pull_request_event(event, data) |
|
136 | 134 | elif isinstance(event, events.RepoPushEvent): |
|
137 | 135 | text = self.format_repo_push_event(data) |
|
138 | 136 | elif isinstance(event, events.RepoCreateEvent): |
|
139 | 137 | text = self.format_repo_create_event(data) |
|
140 | 138 | else: |
|
141 | 139 | log.error('unhandled event type: %r' % event) |
|
142 | 140 | |
|
143 | 141 | run_task(post_text_to_hipchat, self.settings, text) |
|
144 | 142 | |
|
145 | 143 | def settings_schema(self): |
|
146 | 144 | schema = HipchatSettingsSchema() |
|
147 | 145 | schema.add(colander.SchemaNode( |
|
148 | 146 | colander.Set(), |
|
149 | 147 | widget=deform.widget.CheckboxChoiceWidget( |
|
150 | 148 | values=sorted( |
|
151 | 149 | [(e.name, e.display_name) for e in self.valid_events] |
|
152 | 150 | ) |
|
153 | 151 | ), |
|
154 | 152 | description="Events activated for this integration", |
|
155 | 153 | name='events' |
|
156 | 154 | )) |
|
157 | 155 | |
|
158 | 156 | return schema |
|
159 | 157 | |
|
160 | 158 | def format_pull_request_comment_event(self, event, data): |
|
161 | 159 | comment_text = data['comment']['text'] |
|
162 | 160 | if len(comment_text) > 200: |
|
163 | 161 | comment_text = '{comment_text}<a href="{comment_url}">...<a/>'.format( |
|
164 | 162 | comment_text=h.html_escape(comment_text[:200]), |
|
165 | 163 | comment_url=data['comment']['url'], |
|
166 | 164 | ) |
|
167 | 165 | |
|
168 | 166 | comment_status = '' |
|
169 | 167 | if data['comment']['status']: |
|
170 | 168 | comment_status = '[{}]: '.format(data['comment']['status']) |
|
171 | 169 | |
|
172 | 170 | return (textwrap.dedent( |
|
173 | 171 | ''' |
|
174 | 172 | {user} commented on pull request <a href="{pr_url}">{number}</a> - {pr_title}: |
|
175 | 173 | >>> {comment_status}{comment_text} |
|
176 | 174 | ''').format( |
|
177 | 175 | comment_status=comment_status, |
|
178 | 176 | user=data['actor']['username'], |
|
179 | 177 | number=data['pullrequest']['pull_request_id'], |
|
180 | 178 | pr_url=data['pullrequest']['url'], |
|
181 | 179 | pr_status=data['pullrequest']['status'], |
|
182 | 180 | pr_title=h.html_escape(data['pullrequest']['title']), |
|
183 | 181 | comment_text=h.html_escape(comment_text) |
|
184 | 182 | ) |
|
185 | 183 | ) |
|
186 | 184 | |
|
187 | 185 | def format_pull_request_review_event(self, event, data): |
|
188 | 186 | return (textwrap.dedent( |
|
189 | 187 | ''' |
|
190 | 188 | Status changed to {pr_status} for pull request <a href="{pr_url}">#{number}</a> - {pr_title} |
|
191 | 189 | ''').format( |
|
192 | 190 | user=data['actor']['username'], |
|
193 | 191 | number=data['pullrequest']['pull_request_id'], |
|
194 | 192 | pr_url=data['pullrequest']['url'], |
|
195 | 193 | pr_status=data['pullrequest']['status'], |
|
196 | 194 | pr_title=h.html_escape(data['pullrequest']['title']), |
|
197 | 195 | ) |
|
198 | 196 | ) |
|
199 | 197 | |
|
200 | 198 | def format_pull_request_event(self, event, data): |
|
201 | 199 | action = { |
|
202 | 200 | events.PullRequestCloseEvent: 'closed', |
|
203 | 201 | events.PullRequestMergeEvent: 'merged', |
|
204 | 202 | events.PullRequestUpdateEvent: 'updated', |
|
205 | 203 | events.PullRequestCreateEvent: 'created', |
|
206 | 204 | }.get(event.__class__, str(event.__class__)) |
|
207 | 205 | |
|
208 | 206 | return ('Pull request <a href="{url}">#{number}</a> - {title} ' |
|
209 | 207 | '{action} by <b>{user}</b>').format( |
|
210 | 208 | user=data['actor']['username'], |
|
211 | 209 | number=data['pullrequest']['pull_request_id'], |
|
212 | 210 | url=data['pullrequest']['url'], |
|
213 | 211 | title=h.html_escape(data['pullrequest']['title']), |
|
214 | 212 | action=action |
|
215 | 213 | ) |
|
216 | 214 | |
|
217 | 215 | def format_repo_push_event(self, data): |
|
218 | 216 | branch_data = {branch['name']: branch |
|
219 | 217 | for branch in data['push']['branches']} |
|
220 | 218 | |
|
221 | 219 | branches_commits = {} |
|
222 | 220 | for commit in data['push']['commits']: |
|
223 | 221 | if commit['branch'] not in branches_commits: |
|
224 | 222 | branch_commits = {'branch': branch_data[commit['branch']], |
|
225 | 223 | 'commits': []} |
|
226 | 224 | branches_commits[commit['branch']] = branch_commits |
|
227 | 225 | |
|
228 | 226 | branch_commits = branches_commits[commit['branch']] |
|
229 | 227 | branch_commits['commits'].append(commit) |
|
230 | 228 | |
|
231 | 229 | result = repo_push_template.render( |
|
232 | 230 | data=data, |
|
233 | 231 | branches_commits=branches_commits, |
|
234 | 232 | ) |
|
235 | 233 | return result |
|
236 | 234 | |
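`format_repo_push_event` first regroups the flat commit list by branch before rendering the Mako template. A condensed restatement of that loop with illustrative event data:

data = {'push': {
    'branches': [{'name': 'default', 'url': 'http://example.com/branch/default'}],
    'commits': [{'branch': 'default', 'short_id': 'abc123',
                 'url': 'http://example.com/commit/abc123',
                 'message_html': 'fix bug'}],
}}
branch_data = {b['name']: b for b in data['push']['branches']}
branches_commits = {}
for commit in data['push']['commits']:
    # same grouping as above, using setdefault instead of the if-check
    bucket = branches_commits.setdefault(
        commit['branch'],
        {'branch': branch_data[commit['branch']], 'commits': []})
    bucket['commits'].append(commit)
assert len(branches_commits['default']['commits']) == 1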
|
237 | 235 | def format_repo_create_event(self, data): |
|
238 | 236 | return '<a href="{}">{}</a> ({}) repository created by <b>{}</b>'.format( |
|
239 | 237 | data['repo']['url'], |
|
240 | 238 | h.html_escape(data['repo']['repo_name']), |
|
241 | 239 | data['repo']['repo_type'], |
|
242 | 240 | data['actor']['username'], |
|
243 | 241 | ) |
|
244 | 242 | |
|
245 | 243 | |
|
246 | @task(ignore_result=True) | |
|
244 | @async_task(ignore_result=True, base=RequestContextTask) | |
|
247 | 245 | def post_text_to_hipchat(settings, text): |
|
248 | 246 | log.debug('sending %s to hipchat %s' % (text, settings['server_url'])) |
|
249 | 247 | resp = requests.post(settings['server_url'], json={ |
|
250 | 248 | "message": text, |
|
251 | 249 | "color": settings.get('color', 'yellow'), |
|
252 | 250 | "notify": settings.get('notify', False), |
|
253 | 251 | }) |
|
254 | 252 | resp.raise_for_status() # raise exception on a failed request |
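`post_text_to_hipchat` is now declared with `@async_task(..., base=RequestContextTask)` instead of celery's plain `@task`, and is dispatched through `run_task`. A hedged sketch of the dispatch idea; the real `run_task` lives in rhodecode.lib.celerylib and may differ in detail:

import rhodecode

def run_task(task, *args, **kwargs):
    # When a celery worker is configured, hand the work off asynchronously;
    # otherwise degrade gracefully to a synchronous in-process call.
    if rhodecode.CELERY_ENABLED:
        return task.apply_async(args=args, kwargs=kwargs)
    return task(*args, **kwargs)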
@@ -1,334 +1,333 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | from __future__ import unicode_literals |
|
22 | 22 | import re |
|
23 | 23 | import time |
|
24 | 24 | import textwrap |
|
25 | 25 | import logging |
|
26 | 26 | |
|
27 | 27 | import deform |
|
28 | 28 | import requests |
|
29 | 29 | import colander |
|
30 | from celery.task import task | |
|
31 | 30 | from mako.template import Template |
|
32 | 31 | |
|
33 | 32 | from rhodecode import events |
|
34 | 33 | from rhodecode.translation import _ |
|
35 | 34 | from rhodecode.lib import helpers as h |
|
36 | from rhodecode.lib.celerylib import run_task | |
|
35 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask | |
|
37 | 36 | from rhodecode.lib.colander_utils import strip_whitespace |
|
38 | 37 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
39 | 38 | |
|
40 | 39 | log = logging.getLogger(__name__) |
|
41 | 40 | |
|
42 | 41 | |
|
43 | 42 | class SlackSettingsSchema(colander.Schema): |
|
44 | 43 | service = colander.SchemaNode( |
|
45 | 44 | colander.String(), |
|
46 | 45 | title=_('Slack service URL'), |
|
47 | 46 | description=h.literal(_( |
|
48 | 47 | 'This can be setup at the ' |
|
49 | 48 | '<a href="https://my.slack.com/services/new/incoming-webhook/">' |
|
50 | 49 | 'slack app manager</a>')), |
|
51 | 50 | default='', |
|
52 | 51 | preparer=strip_whitespace, |
|
53 | 52 | validator=colander.url, |
|
54 | 53 | widget=deform.widget.TextInputWidget( |
|
55 | 54 | placeholder='https://hooks.slack.com/services/...', |
|
56 | 55 | ), |
|
57 | 56 | ) |
|
58 | 57 | username = colander.SchemaNode( |
|
59 | 58 | colander.String(), |
|
60 | 59 | title=_('Username'), |
|
61 | 60 | description=_('Username to show notifications coming from.'), |
|
62 | 61 | missing='Rhodecode', |
|
63 | 62 | preparer=strip_whitespace, |
|
64 | 63 | widget=deform.widget.TextInputWidget( |
|
65 | 64 | placeholder='Rhodecode' |
|
66 | 65 | ), |
|
67 | 66 | ) |
|
68 | 67 | channel = colander.SchemaNode( |
|
69 | 68 | colander.String(), |
|
70 | 69 | title=_('Channel'), |
|
71 | 70 | description=_('Channel to send notifications to.'), |
|
72 | 71 | missing='', |
|
73 | 72 | preparer=strip_whitespace, |
|
74 | 73 | widget=deform.widget.TextInputWidget( |
|
75 | 74 | placeholder='#general' |
|
76 | 75 | ), |
|
77 | 76 | ) |
|
78 | 77 | icon_emoji = colander.SchemaNode( |
|
79 | 78 | colander.String(), |
|
80 | 79 | title=_('Emoji'), |
|
81 | 80 | description=_('Emoji to use eg. :studio_microphone:'), |
|
82 | 81 | missing='', |
|
83 | 82 | preparer=strip_whitespace, |
|
84 | 83 | widget=deform.widget.TextInputWidget( |
|
85 | 84 | placeholder=':studio_microphone:' |
|
86 | 85 | ), |
|
87 | 86 | ) |
|
88 | 87 | |
|
89 | 88 | |
|
90 | 89 | class SlackIntegrationType(IntegrationTypeBase): |
|
91 | 90 | key = 'slack' |
|
92 | 91 | display_name = _('Slack') |
|
93 | 92 | description = _('Send events such as repo pushes and pull requests to ' |
|
94 | 93 | 'your slack channel.') |
|
95 | 94 | icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M165.963541,15.8384262 C162.07318,3.86308197 149.212328,-2.69009836 137.239082,1.20236066 C125.263738,5.09272131 118.710557,17.9535738 122.603016,29.9268197 L181.550164,211.292328 C185.597902,222.478689 197.682361,228.765377 209.282098,225.426885 C221.381246,221.943607 228.756984,209.093246 224.896,197.21023 C224.749115,196.756984 165.963541,15.8384262 165.963541,15.8384262" fill="#DFA22F"></path><path d="M74.6260984,45.515541 C70.7336393,33.5422951 57.8727869,26.9891148 45.899541,30.8794754 C33.9241967,34.7698361 27.3710164,47.6306885 31.2634754,59.6060328 L90.210623,240.971541 C94.2583607,252.157902 106.34282,258.44459 117.942557,255.104 C130.041705,251.62282 137.417443,238.772459 133.556459,226.887344 C133.409574,226.436197 74.6260984,45.515541 74.6260984,45.515541" fill="#3CB187"></path><path d="M240.161574,166.045377 C252.136918,162.155016 258.688,149.294164 254.797639,137.31882 C250.907279,125.345574 238.046426,118.792393 226.07318,122.682754 L44.7076721,181.632 C33.5213115,185.677639 27.234623,197.762098 30.5731148,209.361836 C34.0563934,221.460984 46.9067541,228.836721 58.7897705,224.975738 C59.2430164,224.828852 240.161574,166.045377 240.161574,166.045377" fill="#CE1E5B"></path><path d="M82.507541,217.270557 C94.312918,213.434754 109.528131,208.491016 125.855475,203.186361 C122.019672,191.380984 117.075934,176.163672 111.76918,159.83423 L68.4191475,173.924721 L82.507541,217.270557" fill="#392538"></path><path d="M173.847082,187.591344 C190.235279,182.267803 205.467279,177.31777 217.195016,173.507148 C213.359213,161.70177 208.413377,146.480262 203.106623,130.146623 L159.75659,144.237115 L173.847082,187.591344" fill="#BB242A"></path><path d="M210.484459,74.7058361 C222.457705,70.8154754 229.010885,57.954623 225.120525,45.9792787 C221.230164,34.0060328 208.369311,27.4528525 196.393967,31.3432131 L15.028459,90.292459 C3.84209836,94.3380984 -2.44459016,106.422557 0.896,118.022295 C4.37718033,130.121443 17.227541,137.49718 29.1126557,133.636197 C29.5638033,133.489311 210.484459,74.7058361 210.484459,74.7058361" fill="#72C5CD"></path><path d="M52.8220328,125.933115 C64.6274098,122.097311 79.8468197,117.151475 96.1762623,111.84682 C90.8527213,95.4565246 85.9026885,80.2245246 82.0920656,68.4946885 L38.731541,82.5872787 L52.8220328,125.933115" fill="#248C73"></path><path d="M144.159475,96.256 C160.551869,90.9303607 175.785967,85.9803279 187.515803,82.1676066 C182.190164,65.7752131 177.240131,50.5390164 173.42741,38.807082 L130.068984,52.8996721 L144.159475,96.256" fill="#62803A"></path></g></svg>''' |
|
96 | 95 | valid_events = [ |
|
97 | 96 | events.PullRequestCloseEvent, |
|
98 | 97 | events.PullRequestMergeEvent, |
|
99 | 98 | events.PullRequestUpdateEvent, |
|
100 | 99 | events.PullRequestCommentEvent, |
|
101 | 100 | events.PullRequestReviewEvent, |
|
102 | 101 | events.PullRequestCreateEvent, |
|
103 | 102 | events.RepoPushEvent, |
|
104 | 103 | events.RepoCreateEvent, |
|
105 | 104 | ] |
|
106 | 105 | |
|
107 | 106 | def send_event(self, event): |
|
108 | 107 | if event.__class__ not in self.valid_events: |
|
109 | 108 | log.debug('event not valid: %r' % event) |
|
110 | 109 | return |
|
111 | 110 | |
|
112 | 111 | if event.name not in self.settings['events']: |
|
113 | 112 | log.debug('event ignored: %r' % event) |
|
114 | 113 | return |
|
115 | 114 | |
|
116 | 115 | data = event.as_dict() |
|
117 | 116 | |
|
118 | 117 | # defaults |
|
119 | 118 | title = '*%s* caused a *%s* event' % ( |
|
120 | 119 | data['actor']['username'], event.name) |
|
121 | 120 | text = '*%s* caused a *%s* event' % ( |
|
122 | 121 | data['actor']['username'], event.name) |
|
123 | 122 | fields = None |
|
124 | 123 | overrides = None |
|
125 | 124 | |
|
126 | 125 | log.debug('handling slack event for %s' % event.name) |
|
127 | 126 | |
|
128 | 127 | if isinstance(event, events.PullRequestCommentEvent): |
|
129 | 128 | (title, text, fields, overrides) \ |
|
130 | 129 | = self.format_pull_request_comment_event(event, data) |
|
131 | 130 | elif isinstance(event, events.PullRequestReviewEvent): |
|
132 | 131 | title, text = self.format_pull_request_review_event(event, data) |
|
133 | 132 | elif isinstance(event, events.PullRequestEvent): |
|
134 | 133 | title, text = self.format_pull_request_event(event, data) |
|
135 | 134 | elif isinstance(event, events.RepoPushEvent): |
|
136 | 135 | title, text = self.format_repo_push_event(data) |
|
137 | 136 | elif isinstance(event, events.RepoCreateEvent): |
|
138 | 137 | title, text = self.format_repo_create_event(data) |
|
139 | 138 | else: |
|
140 | 139 | log.error('unhandled event type: %r' % event) |
|
141 | 140 | |
|
142 | 141 | run_task(post_text_to_slack, self.settings, title, text, fields, overrides) |
|
143 | 142 | |
|
144 | 143 | def settings_schema(self): |
|
145 | 144 | schema = SlackSettingsSchema() |
|
146 | 145 | schema.add(colander.SchemaNode( |
|
147 | 146 | colander.Set(), |
|
148 | 147 | widget=deform.widget.CheckboxChoiceWidget( |
|
149 | 148 | values=sorted( |
|
150 | 149 | [(e.name, e.display_name) for e in self.valid_events] |
|
151 | 150 | ) |
|
152 | 151 | ), |
|
153 | 152 | description="Events activated for this integration", |
|
154 | 153 | name='events' |
|
155 | 154 | )) |
|
156 | 155 | |
|
157 | 156 | return schema |
|
158 | 157 | |
|
159 | 158 | def format_pull_request_comment_event(self, event, data): |
|
160 | 159 | comment_text = data['comment']['text'] |
|
161 | 160 | if len(comment_text) > 200: |
|
162 | 161 | comment_text = '<{comment_url}|{comment_text}...>'.format( |
|
163 | 162 | comment_text=comment_text[:200], |
|
164 | 163 | comment_url=data['comment']['url'], |
|
165 | 164 | ) |
|
166 | 165 | |
|
167 | 166 | fields = None |
|
168 | 167 | overrides = None |
|
169 | 168 | status_text = None |
|
170 | 169 | |
|
171 | 170 | if data['comment']['status']: |
|
172 | 171 | status_color = { |
|
173 | 172 | 'approved': '#0ac878', |
|
174 | 173 | 'rejected': '#e85e4d'}.get(data['comment']['status']) |
|
175 | 174 | |
|
176 | 175 | if status_color: |
|
177 | 176 | overrides = {"color": status_color} |
|
178 | 177 | |
|
179 | 178 | status_text = data['comment']['status'] |
|
180 | 179 | |
|
181 | 180 | if data['comment']['file']: |
|
182 | 181 | fields = [ |
|
183 | 182 | { |
|
184 | 183 | "title": "file", |
|
185 | 184 | "value": data['comment']['file'] |
|
186 | 185 | }, |
|
187 | 186 | { |
|
188 | 187 | "title": "line", |
|
189 | 188 | "value": data['comment']['line'] |
|
190 | 189 | } |
|
191 | 190 | ] |
|
192 | 191 | |
|
193 | 192 | title = Template(textwrap.dedent(r''' |
|
194 | 193 | *${data['actor']['username']}* left ${data['comment']['type']} on pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: |
|
195 | 194 | ''')).render(data=data, comment=event.comment) |
|
196 | 195 | |
|
197 | 196 | text = Template(textwrap.dedent(r''' |
|
198 | 197 | *pull request title*: ${pr_title} |
|
199 | 198 | % if status_text: |
|
200 | 199 | *submitted status*: `${status_text}` |
|
201 | 200 | % endif |
|
202 | 201 | >>> ${comment_text} |
|
203 | 202 | ''')).render(comment_text=comment_text, |
|
204 | 203 | pr_title=data['pullrequest']['title'], |
|
205 | 204 | status_text=status_text) |
|
206 | 205 | |
|
207 | 206 | return title, text, fields, overrides |
|
208 | 207 | |
|
209 | 208 | def format_pull_request_review_event(self, event, data): |
|
210 | 209 | title = Template(textwrap.dedent(r''' |
|
211 | 210 | *${data['actor']['username']}* changed status of pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']} to `${data['pullrequest']['status']}`>: |
|
212 | 211 | ''')).render(data=data) |
|
213 | 212 | |
|
214 | 213 | text = Template(textwrap.dedent(r''' |
|
215 | 214 | *pull request title*: ${pr_title} |
|
216 | 215 | ''')).render( |
|
217 | 216 | pr_title=data['pullrequest']['title'], |
|
218 | 217 | ) |
|
219 | 218 | |
|
220 | 219 | return title, text |
|
221 | 220 | |
|
222 | 221 | def format_pull_request_event(self, event, data): |
|
223 | 222 | action = { |
|
224 | 223 | events.PullRequestCloseEvent: 'closed', |
|
225 | 224 | events.PullRequestMergeEvent: 'merged', |
|
226 | 225 | events.PullRequestUpdateEvent: 'updated', |
|
227 | 226 | events.PullRequestCreateEvent: 'created', |
|
228 | 227 | }.get(event.__class__, str(event.__class__)) |
|
229 | 228 | |
|
230 | 229 | title = Template(textwrap.dedent(r''' |
|
231 | 230 | *${data['actor']['username']}* `${action}` pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: |
|
232 | 231 | ''')).render(data=data, action=action) |
|
233 | 232 | |
|
234 | 233 | text = Template(textwrap.dedent(r''' |
|
235 | 234 | *pull request title*: ${pr_title} |
|
236 | 235 | %if data['pullrequest']['commits']: |
|
237 | 236 | *commits*: ${len(data['pullrequest']['commits'])} |
|
238 | 237 | %endif |
|
239 | 238 | ''')).render( |
|
240 | 239 | pr_title=data['pullrequest']['title'], |
|
241 | 240 | data=data |
|
242 | 241 | ) |
|
243 | 242 | |
|
244 | 243 | return title, text |
|
245 | 244 | |
|
246 | 245 | def format_repo_push_event(self, data): |
|
247 | 246 | branch_data = {branch['name']: branch |
|
248 | 247 | for branch in data['push']['branches']} |
|
249 | 248 | |
|
250 | 249 | branches_commits = {} |
|
251 | 250 | for commit in data['push']['commits']: |
|
252 | 251 | if commit['branch'] not in branches_commits: |
|
253 | 252 | branch_commits = {'branch': branch_data[commit['branch']], |
|
254 | 253 | 'commits': []} |
|
255 | 254 | branches_commits[commit['branch']] = branch_commits |
|
256 | 255 | |
|
257 | 256 | branch_commits = branches_commits[commit['branch']] |
|
258 | 257 | branch_commits['commits'].append(commit) |
|
259 | 258 | |
|
260 | 259 | title = Template(r''' |
|
261 | 260 | *${data['actor']['username']}* pushed to repo <${data['repo']['url']}|${data['repo']['repo_name']}>: |
|
262 | 261 | ''').render(data=data) |
|
263 | 262 | |
|
264 | 263 | repo_push_template = Template(textwrap.dedent(r''' |
|
265 | 264 | %for branch, branch_commits in branches_commits.items(): |
|
266 | 265 | ${len(branch_commits['commits'])} ${'commit' if len(branch_commits['commits']) == 1 else 'commits'} on branch: <${branch_commits['branch']['url']}|${branch_commits['branch']['name']}> |
|
267 | 266 | %for commit in branch_commits['commits']: |
|
268 | 267 | `<${commit['url']}|${commit['short_id']}>` - ${commit['message_html']|html_to_slack_links} |
|
269 | 268 | %endfor |
|
270 | 269 | %endfor |
|
271 | 270 | ''')) |
|
272 | 271 | |
|
273 | 272 | text = repo_push_template.render( |
|
274 | 273 | data=data, |
|
275 | 274 | branches_commits=branches_commits, |
|
276 | 275 | html_to_slack_links=html_to_slack_links, |
|
277 | 276 | ) |
|
278 | 277 | |
|
279 | 278 | return title, text |
|
280 | 279 | |
|
281 | 280 | def format_repo_create_event(self, data): |
|
282 | 281 | title = Template(r''' |
|
283 | 282 | *${data['actor']['username']}* created new repository ${data['repo']['repo_name']}: |
|
284 | 283 | ''').render(data=data) |
|
285 | 284 | |
|
286 | 285 | text = Template(textwrap.dedent(r''' |
|
287 | 286 | repo_url: ${data['repo']['url']} |
|
288 | 287 | repo_type: ${data['repo']['repo_type']} |
|
289 | 288 | ''')).render(data=data) |
|
290 | 289 | |
|
291 | 290 | return title, text |
|
292 | 291 | |
|
293 | 292 | |
|
294 | 293 | def html_to_slack_links(message): |
|
295 | 294 | return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub( |
|
296 | 295 | r'<\1|\2>', message) |
|
297 | 296 | |
|
298 | 297 | |
|
299 | @task(ignore_result=True) | |
|
298 | @async_task(ignore_result=True, base=RequestContextTask) | |
|
300 | 299 | def post_text_to_slack(settings, title, text, fields=None, overrides=None): |
|
301 | 300 | log.debug('sending %s (%s) to slack %s' % ( |
|
302 | 301 | title, text, settings['service'])) |
|
303 | 302 | |
|
304 | 303 | fields = fields or [] |
|
305 | 304 | overrides = overrides or {} |
|
306 | 305 | |
|
307 | 306 | message_data = { |
|
308 | 307 | "fallback": text, |
|
309 | 308 | "color": "#427cc9", |
|
310 | 309 | "pretext": title, |
|
311 | 310 | #"author_name": "Bobby Tables", |
|
312 | 311 | #"author_link": "http://flickr.com/bobby/", |
|
313 | 312 | #"author_icon": "http://flickr.com/icons/bobby.jpg", |
|
314 | 313 | #"title": "Slack API Documentation", |
|
315 | 314 | #"title_link": "https://api.slack.com/", |
|
316 | 315 | "text": text, |
|
317 | 316 | "fields": fields, |
|
318 | 317 | #"image_url": "http://my-website.com/path/to/image.jpg", |
|
319 | 318 | #"thumb_url": "http://example.com/path/to/thumb.png", |
|
320 | 319 | "footer": "RhodeCode", |
|
321 | 320 | #"footer_icon": "", |
|
322 | 321 | "ts": time.time(), |
|
323 | 322 | "mrkdwn_in": ["pretext", "text"] |
|
324 | 323 | } |
|
325 | 324 | message_data.update(overrides) |
|
326 | 325 | json_message = { |
|
327 | 326 | "icon_emoji": settings.get('icon_emoji', ':studio_microphone:'), |
|
328 | 327 | "channel": settings.get('channel', ''), |
|
329 | 328 | "username": settings.get('username', 'Rhodecode'), |
|
330 | 329 | "attachments": [message_data] |
|
331 | 330 | } |
|
332 | 331 | |
|
333 | 332 | resp = requests.post(settings['service'], json=json_message) |
|
334 | 333 | resp.raise_for_status() # raise exception on a failed request |
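`html_to_slack_links` rewrites anchor tags in rendered commit messages into Slack's `<url|label>` markup; a small check with an illustrative message:

msg = 'fixes <a class="issue" href="https://example.com/issue/1">#1</a>'
assert html_to_slack_links(msg) == 'fixes <https://example.com/issue/1|#1>'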
@@ -1,350 +1,351 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | from __future__ import unicode_literals |
|
22 | 22 | import string |
|
23 | 23 | from collections import OrderedDict |
|
24 | 24 | |
|
25 | 25 | import deform |
|
26 | 26 | import deform.widget |
|
27 | 27 | import logging |
|
28 | 28 | import requests |
|
29 | 29 | import requests.adapters |
|
30 | 30 | import colander |
|
31 | from celery.task import task | |
|
32 | 31 | from requests.packages.urllib3.util.retry import Retry |
|
33 | 32 | |
|
34 | 33 | import rhodecode |
|
35 | 34 | from rhodecode import events |
|
36 | 35 | from rhodecode.translation import _ |
|
37 | 36 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
37 | from rhodecode.lib.celerylib import async_task, RequestContextTask | |
|
38 | 38 | |
|
39 | 39 | log = logging.getLogger(__name__) |
|
40 | 40 | |
|
41 | ||
|
41 | 42 | # updating this required to update the `common_vars` passed in url calling func |
|
42 | 43 | WEBHOOK_URL_VARS = [ |
|
43 | 44 | 'repo_name', |
|
44 | 45 | 'repo_type', |
|
45 | 46 | 'repo_id', |
|
46 | 47 | 'repo_url', |
|
47 | 48 | # extra repo fields |
|
48 | 49 | 'extra:<extra_key_name>', |
|
49 | 50 | |
|
50 | 51 | # special attrs below that we handle, using multi-call |
|
51 | 52 | 'branch', |
|
52 | 53 | 'commit_id', |
|
53 | 54 | |
|
54 | 55 | # pr events vars |
|
55 | 56 | 'pull_request_id', |
|
56 | 57 | 'pull_request_url', |
|
57 | 58 | |
|
58 | 59 | # user who triggers the call |
|
59 | 60 | 'username', |
|
60 | 61 | 'user_id', |
|
61 | 62 | |
|
62 | 63 | ] |
|
63 | 64 | URL_VARS = ', '.join('${' + x + '}' for x in WEBHOOK_URL_VARS) |
|
64 | 65 | |
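The `${...}` placeholders are expanded with `string.Template`; a small sketch of the two-stage substitution this module relies on, using a hypothetical template URL::

    import string

    template = 'https://ci.example.com/${repo_name}/${branch}/${commit_id}'

    # safe_substitute leaves unknown placeholders untouched instead of
    # raising KeyError, so ${branch} and ${commit_id} survive the first pass
    url = string.Template(template).safe_substitute(repo_name='my-repo')
    print(url)  # https://ci.example.com/my-repo/${branch}/${commit_id}

    # a later pass can then expand the remaining variables per branch/commit
    url = string.Template(url).safe_substitute(branch='default', commit_id='a1b2c3')
    print(url)  # https://ci.example.com/my-repo/default/a1b2c3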
|
65 | 66 | |
|
66 | 67 | def get_auth(settings): |
|
67 | 68 | from requests.auth import HTTPBasicAuth |
|
68 | 69 | username = settings.get('username') |
|
69 | 70 | password = settings.get('password') |
|
70 | 71 | if username and password: |
|
71 | 72 | return HTTPBasicAuth(username, password) |
|
72 | 73 | return None |
|
73 | 74 | |
|
74 | 75 | |
|
75 | 76 | class WebhookHandler(object): |
|
76 | 77 | def __init__(self, template_url, secret_token, headers): |
|
77 | 78 | self.template_url = template_url |
|
78 | 79 | self.secret_token = secret_token |
|
79 | 80 | self.headers = headers |
|
80 | 81 | |
|
81 | 82 | def get_base_parsed_template(self, data): |
|
82 | 83 | """ |
|
83 | 84 | initially parses the passed in template with some common variables |
|
84 | 85 | available on ALL calls |
|
85 | 86 | """ |
|
86 | 87 | # note: make sure to update the `WEBHOOK_URL_VARS` if this changes |
|
87 | 88 | common_vars = { |
|
88 | 89 | 'repo_name': data['repo']['repo_name'], |
|
89 | 90 | 'repo_type': data['repo']['repo_type'], |
|
90 | 91 | 'repo_id': data['repo']['repo_id'], |
|
91 | 92 | 'repo_url': data['repo']['url'], |
|
92 | 93 | 'username': data['actor']['username'], |
|
93 | 94 | 'user_id': data['actor']['user_id'] |
|
94 | 95 | } |
|
95 | 96 | extra_vars = {} |
|
96 | 97 | for extra_key, extra_val in data['repo']['extra_fields'].items(): |
|
97 | 98 | extra_vars['extra:{}'.format(extra_key)] = extra_val |
|
98 | 99 | common_vars.update(extra_vars) |
|
99 | 100 | |
|
100 | 101 | return string.Template( |
|
101 | 102 | self.template_url).safe_substitute(**common_vars) |
|
102 | 103 | |
|
103 | 104 | def repo_push_event_handler(self, event, data): |
|
104 | 105 | url = self.get_base_parsed_template(data) |
|
105 | 106 | url_cals = [] |
|
106 | 107 | branch_data = OrderedDict() |
|
107 | 108 | for obj in data['push']['branches']: |
|
108 | 109 | branch_data[obj['name']] = obj |
|
109 | 110 | |
|
110 | 111 | branches_commits = OrderedDict() |
|
111 | 112 | for commit in data['push']['commits']: |
|
112 | 113 | if commit['branch'] not in branches_commits: |
|
113 | 114 | branch_commits = {'branch': branch_data[commit['branch']], |
|
114 | 115 | 'commits': []} |
|
115 | 116 | branches_commits[commit['branch']] = branch_commits |
|
116 | 117 | |
|
117 | 118 | branch_commits = branches_commits[commit['branch']] |
|
118 | 119 | branch_commits['commits'].append(commit) |
|
119 | 120 | |
|
120 | 121 | if '${branch}' in url: |
|
121 | 122 | # call it multiple times, for each branch if used in variables |
|
122 | 123 | for branch, commit_ids in branches_commits.items(): |
|
123 | 124 | branch_url = string.Template(url).safe_substitute(branch=branch) |
|
124 | 125 | # call further down for each commit if used |
|
125 | 126 | if '${commit_id}' in branch_url: |
|
126 | 127 | for commit_data in commit_ids['commits']: |
|
127 | 128 | commit_id = commit_data['raw_id'] |
|
128 | 129 | commit_url = string.Template(branch_url).safe_substitute( |
|
129 | 130 | commit_id=commit_id) |
|
130 | 131 | # register per-commit call |
|
131 | 132 | log.debug( |
|
132 | 133 | 'register webhook call(%s) to url %s', event, commit_url) |
|
133 | 134 | url_cals.append((commit_url, self.secret_token, self.headers, data)) |
|
134 | 135 | |
|
135 | 136 | else: |
|
136 | 137 | # register per-branch call |
|
137 | 138 | log.debug( |
|
138 | 139 | 'register webhook call(%s) to url %s', event, branch_url) |
|
139 | 140 | url_cals.append((branch_url, self.secret_token, self.headers, data)) |
|
140 | 141 | |
|
141 | 142 | else: |
|
142 | 143 | log.debug( |
|
143 | 144 | 'register webhook call(%s) to url %s', event, url) |
|
144 | 145 | url_cals.append((url, self.secret_token, self.headers, data)) |
|
145 | 146 | |
|
146 | 147 | return url_cals |
|
147 | 148 | |
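To make the fan-out above concrete, a sketch with hypothetical push data: a template that uses both `${branch}` and `${commit_id}` registers one call per pushed commit::

    import string

    url = 'https://ci.example.com/my-repo/${branch}/${commit_id}'
    pushed = {'default': ['a1b2c3', 'd4e5f6'], 'stable': ['0f9e8d']}  # branch -> commit ids

    calls = []
    for branch, commit_ids in pushed.items():
        branch_url = string.Template(url).safe_substitute(branch=branch)
        for commit_id in commit_ids:
            calls.append(
                string.Template(branch_url).safe_substitute(commit_id=commit_id))

    print(len(calls))  # 3 -- one webhook call per unique commit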
|
148 | 149 | def repo_create_event_handler(self, event, data): |
|
149 | 150 | url = self.get_base_parsed_template(data) |
|
150 | 151 | log.debug( |
|
151 | 152 | 'register webhook call(%s) to url %s', event, url) |
|
152 | 153 | return [(url, self.secret_token, self.headers, data)] |
|
153 | 154 | |
|
154 | 155 | def pull_request_event_handler(self, event, data): |
|
155 | 156 | url = self.get_base_parsed_template(data) |
|
156 | 157 | log.debug( |
|
157 | 158 | 'register webhook call(%s) to url %s', event, url) |
|
158 | 159 | url = string.Template(url).safe_substitute( |
|
159 | 160 | pull_request_id=data['pullrequest']['pull_request_id'], |
|
160 | 161 | pull_request_url=data['pullrequest']['url']) |
|
161 | 162 | return [(url, self.secret_token, self.headers, data)] |
|
162 | 163 | |
|
163 | 164 | def __call__(self, event, data): |
|
164 | 165 | if isinstance(event, events.RepoPushEvent): |
|
165 | 166 | return self.repo_push_event_handler(event, data) |
|
166 | 167 | elif isinstance(event, events.RepoCreateEvent): |
|
167 | 168 | return self.repo_create_event_handler(event, data) |
|
168 | 169 | elif isinstance(event, events.PullRequestEvent): |
|
169 | 170 | return self.pull_request_event_handler(event, data) |
|
170 | 171 | else: |
|
171 | 172 | raise ValueError('event type not supported: %s' % event) |
|
172 | 173 | |
|
173 | 174 | |
|
174 | 175 | class WebhookSettingsSchema(colander.Schema): |
|
175 | 176 | url = colander.SchemaNode( |
|
176 | 177 | colander.String(), |
|
177 | 178 | title=_('Webhook URL'), |
|
178 | 179 | description= |
|
179 | 180 | _('URL to which Webhook should submit data. Following variables ' |
|
180 | 181 | 'are allowed to be used: {vars}. Some of the variables would ' |
|
181 | 182 | 'trigger multiple calls, like ${{branch}} or ${{commit_id}}. ' |
|
182 | 183 | 'Webhook will be called as many times as unique objects in ' |
|
183 | 184 | 'data in such cases.').format(vars=URL_VARS), |
|
184 | 185 | missing=colander.required, |
|
185 | 186 | required=True, |
|
186 | 187 | validator=colander.url, |
|
187 | 188 | widget=deform.widget.TextInputWidget( |
|
188 | 189 | placeholder='https://www.example.com/webhook' |
|
189 | 190 | ), |
|
190 | 191 | ) |
|
191 | 192 | secret_token = colander.SchemaNode( |
|
192 | 193 | colander.String(), |
|
193 | 194 | title=_('Secret Token'), |
|
194 | 195 | description=_('Optional string used to validate received payloads. ' |
|
195 | 196 | 'It will be sent together with event data in JSON'), |
|
196 | 197 | default='', |
|
197 | 198 | missing='', |
|
198 | 199 | widget=deform.widget.TextInputWidget( |
|
199 | 200 | placeholder='e.g. secret_token' |
|
200 | 201 | ), |
|
201 | 202 | ) |
|
202 | 203 | username = colander.SchemaNode( |
|
203 | 204 | colander.String(), |
|
204 | 205 | title=_('Username'), |
|
205 | 206 | description=_('Optional username to authenticate the call.'), |
|
206 | 207 | default='', |
|
207 | 208 | missing='', |
|
208 | 209 | widget=deform.widget.TextInputWidget( |
|
209 | 210 | placeholder='e.g. admin' |
|
210 | 211 | ), |
|
211 | 212 | ) |
|
212 | 213 | password = colander.SchemaNode( |
|
213 | 214 | colander.String(), |
|
214 | 215 | title=_('Password'), |
|
215 | 216 | description=_('Optional password to authenticate the call.'), |
|
216 | 217 | default='', |
|
217 | 218 | missing='', |
|
218 | 219 | widget=deform.widget.PasswordWidget( |
|
219 | 220 | placeholder='e.g. secret.', |
|
220 | 221 | redisplay=True, |
|
221 | 222 | ), |
|
222 | 223 | ) |
|
223 | 224 | custom_header_key = colander.SchemaNode( |
|
224 | 225 | colander.String(), |
|
225 | 226 | title=_('Custom Header Key'), |
|
226 | 227 | description=_('Custom Header name to be set when calling endpoint.'), |
|
227 | 228 | default='', |
|
228 | 229 | missing='', |
|
229 | 230 | widget=deform.widget.TextInputWidget( |
|
230 | 231 | placeholder='e.g. Authorization' |
|
231 | 232 | ), |
|
232 | 233 | ) |
|
233 | 234 | custom_header_val = colander.SchemaNode( |
|
234 | 235 | colander.String(), |
|
235 | 236 | title=_('Custom Header Value'), |
|
236 | 237 | description=_('Custom Header value to be set when calling endpoint.'), |
|
237 | 238 | default='', |
|
238 | 239 | missing='', |
|
239 | 240 | widget=deform.widget.TextInputWidget( |
|
240 | 241 | placeholder='e.g. RcLogin auth=xxxx' |
|
241 | 242 | ), |
|
242 | 243 | ) |
|
243 | 244 | method_type = colander.SchemaNode( |
|
244 | 245 | colander.String(), |
|
245 | 246 | title=_('Call Method'), |
|
246 | 247 | description=_('Select if the Webhook call should be made ' |
|
247 | 248 | 'with POST or GET.'), |
|
248 | 249 | default='post', |
|
249 | 250 | missing='', |
|
250 | 251 | widget=deform.widget.RadioChoiceWidget( |
|
251 | 252 | values=[('get', 'GET'), ('post', 'POST')], |
|
252 | 253 | inline=True |
|
253 | 254 | ), |
|
254 | 255 | ) |
|
255 | 256 | |
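For context, a minimal sketch of how a `colander` schema of this shape validates settings; the node names here are illustrative, not the full schema above::

    import colander


    class ExampleSchema(colander.Schema):
        url = colander.SchemaNode(
            colander.String(),
            missing=colander.required,
            validator=colander.url)
        secret_token = colander.SchemaNode(
            colander.String(), default='', missing='')


    schema = ExampleSchema()

    settings = schema.deserialize({'url': 'https://www.example.com/webhook'})
    print(settings['secret_token'])  # '' -- optional node falls back to its missing value

    try:
        schema.deserialize({'url': 'not-a-url'})
    except colander.Invalid as err:
        print(err.asdict())  # per-field validation errors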
|
256 | 257 | |
|
257 | 258 | class WebhookIntegrationType(IntegrationTypeBase): |
|
258 | 259 | key = 'webhook' |
|
259 | 260 | display_name = _('Webhook') |
|
260 | 261 | description = _('Post json events to a Webhook endpoint') |
|
261 | 262 | icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>''' |
|
262 | 263 | |
|
263 | 264 | valid_events = [ |
|
264 | 265 | events.PullRequestCloseEvent, |
|
265 | 266 | events.PullRequestMergeEvent, |
|
266 | 267 | events.PullRequestUpdateEvent, |
|
267 | 268 | events.PullRequestCommentEvent, |
|
268 | 269 | events.PullRequestReviewEvent, |
|
269 | 270 | events.PullRequestCreateEvent, |
|
270 | 271 | events.RepoPushEvent, |
|
271 | 272 | events.RepoCreateEvent, |
|
272 | 273 | ] |
|
273 | 274 | |
|
274 | 275 | def settings_schema(self): |
|
275 | 276 | schema = WebhookSettingsSchema() |
|
276 | 277 | schema.add(colander.SchemaNode( |
|
277 | 278 | colander.Set(), |
|
278 | 279 | widget=deform.widget.CheckboxChoiceWidget( |
|
279 | 280 | values=sorted( |
|
280 | 281 | [(e.name, e.display_name) for e in self.valid_events] |
|
281 | 282 | ) |
|
282 | 283 | ), |
|
283 | 284 | description="Events activated for this integration", |
|
284 | 285 | name='events' |
|
285 | 286 | )) |
|
286 | 287 | return schema |
|
287 | 288 | |
|
288 | 289 | def send_event(self, event): |
|
289 | 290 | log.debug('handling event %s with Webhook integration %s', |
|
290 | 291 | event.name, self) |
|
291 | 292 | |
|
292 | 293 | if event.__class__ not in self.valid_events: |
|
293 | 294 | log.debug('event not valid: %r' % event) |
|
294 | 295 | return |
|
295 | 296 | |
|
296 | 297 | if event.name not in self.settings['events']: |
|
297 | 298 | log.debug('event ignored: %r' % event) |
|
298 | 299 | return |
|
299 | 300 | |
|
300 | 301 | data = event.as_dict() |
|
301 | 302 | template_url = self.settings['url'] |
|
302 | 303 | |
|
303 | 304 | headers = {} |
|
304 | 305 | head_key = self.settings.get('custom_header_key') |
|
305 | 306 | head_val = self.settings.get('custom_header_val') |
|
306 | 307 | if head_key and head_val: |
|
307 | 308 | headers = {head_key: head_val} |
|
308 | 309 | |
|
309 | 310 | handler = WebhookHandler( |
|
310 | 311 | template_url, self.settings['secret_token'], headers) |
|
311 | 312 | |
|
312 | 313 | url_calls = handler(event, data) |
|
313 | 314 | log.debug('webhook: calling following urls: %s', |
|
314 | 315 | [x[0] for x in url_calls]) |
|
315 | 316 | post_to_webhook(url_calls, self.settings) |
|
316 | 317 | |
|
317 | 318 | |
|
318 | @task(ignore_result=True) | |
|
319 | @async_task(ignore_result=True, base=RequestContextTask) | |
|
319 | 320 | def post_to_webhook(url_calls, settings): |
|
320 | 321 | max_retries = 3 |
|
321 | 322 | retries = Retry( |
|
322 | 323 | total=max_retries, |
|
323 | 324 | backoff_factor=0.15, |
|
324 | 325 | status_forcelist=[500, 502, 503, 504]) |
|
325 | 326 | call_headers = { |
|
326 | 327 | 'User-Agent': 'RhodeCode-webhook-caller/{}'.format( |
|
327 | 328 | rhodecode.__version__) |
|
328 | 329 | } # updated below with custom ones, allows override |
|
329 | 330 | |
|
330 | 331 | for url, token, headers, data in url_calls: |
|
331 | 332 | req_session = requests.Session() |
|
332 | 333 | req_session.mount( # retry max N times |
|
333 | 334 | 'http://', requests.adapters.HTTPAdapter(max_retries=retries)) |
|
334 | 335 | |
|
335 | 336 | method = settings.get('method_type') or 'post' |
|
336 | 337 | call_method = getattr(req_session, method) |
|
337 | 338 | |
|
338 | 339 | headers = headers or {} |
|
339 | 340 | call_headers.update(headers) |
|
340 | 341 | auth = get_auth(settings) |
|
341 | 342 | |
|
342 | 343 | log.debug('calling Webhook with method: %s, and auth:%s', |
|
343 | 344 | call_method, auth) |
|
344 | 345 | resp = call_method(url, json={ |
|
345 | 346 | 'token': token, |
|
346 | 347 | 'event': data |
|
347 | 348 | }, headers=call_headers, auth=auth) |
|
348 | 349 | log.debug('Got Webhook response: %s', resp) |
|
349 | 350 | |
|
350 | 351 | resp.raise_for_status() # raise exception on a failed request |
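A standalone sketch of the retry setup used above, against a hypothetical endpoint. Note the function mounts the adapter for `http://` only; a variant that also retries TLS endpoints would mount `https://` as well (an assumption, not what the code above does)::

    import requests
    import requests.adapters
    from requests.packages.urllib3.util.retry import Retry

    retries = Retry(
        total=3,                            # retry failed calls up to 3 times
        backoff_factor=0.15,                # exponential back-off between attempts
        status_forcelist=[500, 502, 503, 504])

    session = requests.Session()
    adapter = requests.adapters.HTTPAdapter(max_retries=retries)
    session.mount('http://', adapter)
    session.mount('https://', adapter)      # assumption: also cover https endpoints

    resp = session.post('http://example.com/webhook', json={'event': {}})
    resp.raise_for_status()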
@@ -1,236 +1,72 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | """ | |
|
21 | celery libs for RhodeCode | |
|
22 | """ | |
|
23 | 20 | |
|
24 | ||
|
25 | import pylons | |
|
26 | 21 | import socket |
|
27 | 22 | import logging |
|
28 | 23 | |
|
29 | 24 | import rhodecode |
|
30 | ||
|
31 | from os.path import join as jn | |
|
32 | from pylons import config | |
|
33 | from celery.task import Task | |
|
34 | from pyramid.request import Request | |
|
35 | from pyramid.scripting import prepare | |
|
36 | from pyramid.threadlocal import get_current_request | |
|
37 | ||
|
38 | from decorator import decorator | |
|
25 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
|
26 | from rhodecode.lib.celerylib.loader import ( | |
|
27 | celery_app, RequestContextTask, get_logger) | |
|
39 | 28 | |
|
40 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
|
29 | async_task = celery_app.task | |
|
41 | 30 | |
|
42 | from rhodecode.config import utils | |
|
43 | from rhodecode.lib.utils2 import ( | |
|
44 | safe_str, md5_safe, aslist, get_routes_generator_for_server_url, | |
|
45 | get_server_url) | |
|
46 | from rhodecode.lib.pidlock import DaemonLock, LockHeld | |
|
47 | from rhodecode.lib.vcs import connect_vcs | |
|
48 | from rhodecode.model import meta | |
|
49 | from rhodecode.lib.auth import AuthUser | |
|
50 | 31 | |
|
51 | 32 | log = logging.getLogger(__name__) |
|
52 | 33 | |
|
53 | 34 | |
|
54 | 35 | class ResultWrapper(object): |
|
55 | 36 | def __init__(self, task): |
|
56 | 37 | self.task = task |
|
57 | 38 | |
|
58 | 39 | @LazyProperty |
|
59 | 40 | def result(self): |
|
60 | 41 | return self.task |
|
61 | 42 | |
|
62 | 43 | |
|
63 | class RhodecodeCeleryTask(Task): | |
|
64 | """ | |
|
65 | This is a celery task which will create a rhodecode app instance context | |
|
66 | for the task, patch pyramid + pylons threadlocals with the original request | |
|
67 | that created the task and also add the user to the context. | |
|
68 | ||
|
69 | This class as a whole should be removed once the pylons port is complete | |
|
70 | and a pyramid only solution for celery is implemented as per issue #4139 | |
|
71 | """ | |
|
72 | ||
|
73 | def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, | |
|
74 | link=None, link_error=None, **options): | |
|
75 | """ queue the job to run (we are in web request context here) """ | |
|
76 | ||
|
77 | request = get_current_request() | |
|
78 | ||
|
79 | if hasattr(request, 'user'): | |
|
80 | ip_addr = request.user.ip_addr | |
|
81 | user_id = request.user.user_id | |
|
82 | elif hasattr(request, 'rpc_params'): | |
|
83 | # TODO(marcink) remove when migration is finished | |
|
84 | # api specific call on Pyramid. | |
|
85 | ip_addr = request.rpc_params['apiuser'].ip_addr | |
|
86 | user_id = request.rpc_params['apiuser'].user_id | |
|
87 | else: | |
|
88 | raise Exception('Unable to fetch data from request: {}'.format( | |
|
89 | request)) | |
|
90 | ||
|
91 | if request: | |
|
92 | # we hook into kwargs since it is the only way to pass our data to | |
|
93 | # the celery worker in celery 2.2 | |
|
94 | kwargs.update({ | |
|
95 | '_rhodecode_proxy_data': { | |
|
96 | 'environ': { | |
|
97 | 'PATH_INFO': request.environ['PATH_INFO'], | |
|
98 | 'SCRIPT_NAME': request.environ['SCRIPT_NAME'], | |
|
99 | 'HTTP_HOST': request.environ.get('HTTP_HOST', | |
|
100 | request.environ['SERVER_NAME']), | |
|
101 | 'SERVER_NAME': request.environ['SERVER_NAME'], | |
|
102 | 'SERVER_PORT': request.environ['SERVER_PORT'], | |
|
103 | 'wsgi.url_scheme': request.environ['wsgi.url_scheme'], | |
|
104 | }, | |
|
105 | 'auth_user': { | |
|
106 | 'ip_addr': ip_addr, | |
|
107 | 'user_id': user_id | |
|
108 | }, | |
|
109 | } | |
|
110 | }) | |
|
111 | return super(RhodecodeCeleryTask, self).apply_async( | |
|
112 | args, kwargs, task_id, producer, link, link_error, **options) | |
|
113 | ||
|
114 | def __call__(self, *args, **kwargs): | |
|
115 | """ rebuild the context and then run task on celery worker """ | |
|
116 | proxy_data = kwargs.pop('_rhodecode_proxy_data', {}) | |
|
117 | ||
|
118 | if not proxy_data: | |
|
119 | return super(RhodecodeCeleryTask, self).__call__(*args, **kwargs) | |
|
120 | ||
|
121 | log.debug('using celery proxy data to run task: %r', proxy_data) | |
|
122 | ||
|
123 | from rhodecode.config.routing import make_map | |
|
124 | ||
|
125 | request = Request.blank('/', environ=proxy_data['environ']) | |
|
126 | request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'], | |
|
127 | ip_addr=proxy_data['auth_user']['ip_addr']) | |
|
128 | ||
|
129 | pyramid_request = prepare(request) # set pyramid threadlocal request | |
|
130 | ||
|
131 | # pylons routing | |
|
132 | if not rhodecode.CONFIG.get('routes.map'): | |
|
133 | rhodecode.CONFIG['routes.map'] = make_map(config) | |
|
134 | pylons.url._push_object(get_routes_generator_for_server_url( | |
|
135 | get_server_url(request.environ) | |
|
136 | )) | |
|
137 | ||
|
138 | try: | |
|
139 | return super(RhodecodeCeleryTask, self).__call__(*args, **kwargs) | |
|
140 | finally: | |
|
141 | pyramid_request['closer']() | |
|
142 | pylons.url._pop_object() | |
|
143 | ||
|
144 | ||
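The replacement lives in `rhodecode.lib.celerylib.loader` as `RequestContextTask`, but the underlying trick is the same one visible in the removed class above. A minimal, generic sketch of that pattern with plain Celery (the `_proxy_data` key and its values are illustrative, not the real implementation)::

    from celery import Celery, Task

    app = Celery()


    class ContextTask(Task):
        """Smuggle web-request state to the worker through task kwargs."""

        def apply_async(self, args=None, kwargs=None, **options):
            kwargs = dict(kwargs or {})
            # capture whatever the worker needs to rebuild the request context
            kwargs['_proxy_data'] = {
                'auth_user': {'user_id': 1, 'ip_addr': '127.0.0.1'}}
            return super(ContextTask, self).apply_async(args, kwargs, **options)

        def __call__(self, *args, **kwargs):
            proxy_data = kwargs.pop('_proxy_data', {})
            # ...rebuild threadlocals from proxy_data before running the body...
            return super(ContextTask, self).__call__(*args, **kwargs)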
|
145 | 44 | def run_task(task, *args, **kwargs): |
|
146 | 45 | if rhodecode.CELERY_ENABLED: |
|
147 | 46 | celery_is_up = False |
|
148 | 47 | try: |
|
149 | 48 | t = task.apply_async(args=args, kwargs=kwargs) |
|
150 | log.info('running task %s:%s', t.task_id, task) | |
|
151 | 49 | celery_is_up = True |
|
50 | log.debug('executing task %s:%s in async mode', t.task_id, task) | |
|
152 | 51 | return t |
|
153 | 52 | |
|
154 | 53 | except socket.error as e: |
|
155 | 54 | if isinstance(e, IOError) and e.errno == 111: |
|
156 | 55 | log.error('Unable to connect to celeryd. Sync execution') |
|
157 | 56 | else: |
|
158 | 57 | log.exception("Exception while connecting to celeryd.") |
|
159 | 58 | except KeyError as e: |
|
160 | 59 | log.error('Unable to connect to celeryd. Sync execution') |
|
161 | 60 | except Exception as e: |
|
162 | 61 | log.exception( |
|
163 | 62 | "Exception while trying to run task asynchronous. " |
|
164 | 63 | "Fallback to sync execution.") |
|
165 | 64 | |
|
166 | 65 | # keep in mind there may be a subtle race condition where something |
|
167 | 66 | # depending on rhodecode.CELERY_ENABLED |
|
168 | 67 | # will see CELERY_ENABLED as True before this has a chance to set False |
|
169 | 68 | rhodecode.CELERY_ENABLED = celery_is_up |
|
170 | 69 | else: |
|
171 | log.debug('executing task %s in sync mode', task) | |
|
172 | return ResultWrapper(task(*args, **kwargs)) | |
|
173 | ||
|
174 | ||
|
175 | def __get_lockkey(func, *fargs, **fkwargs): | |
|
176 | params = list(fargs) | |
|
177 | params.extend(['%s-%s' % ar for ar in fkwargs.items()]) | |
|
178 | ||
|
179 | func_name = str(func.__name__) if hasattr(func, '__name__') else str(func) | |
|
180 | _lock_key = func_name + '-' + '-'.join(map(safe_str, params)) | |
|
181 | return 'task_%s.lock' % (md5_safe(_lock_key),) | |
|
182 | ||
|
183 | ||
|
184 | def locked_task(func): | |
|
185 | def __wrapper(func, *fargs, **fkwargs): | |
|
186 | lockkey = __get_lockkey(func, *fargs, **fkwargs) | |
|
187 | lockkey_path = config['app_conf']['cache_dir'] | |
|
188 | ||
|
189 | log.info('running task with lockkey %s' % lockkey) | |
|
190 | try: | |
|
191 | l = DaemonLock(file_=jn(lockkey_path, lockkey)) | |
|
192 | ret = func(*fargs, **fkwargs) | |
|
193 | l.release() | |
|
194 | return ret | |
|
195 | except LockHeld: | |
|
196 | log.info('LockHeld') | |
|
197 | return 'Task with key %s already running' % lockkey | |
|
198 | ||
|
199 | return decorator(__wrapper, func) | |
|
200 | ||
|
70 | log.debug('executing task %s:%s in sync mode', 'TASK', task) | |
|
201 | 71 | |
|
202 | def get_session(): | |
|
203 | if rhodecode.CELERY_ENABLED: | |
|
204 | utils.initialize_database(config) | |
|
205 | sa = meta.Session() | |
|
206 | return sa | |
|
207 | ||
|
208 | ||
|
209 | def dbsession(func): | |
|
210 | def __wrapper(func, *fargs, **fkwargs): | |
|
211 | try: | |
|
212 | ret = func(*fargs, **fkwargs) | |
|
213 | return ret | |
|
214 | finally: | |
|
215 | if rhodecode.CELERY_ENABLED and not rhodecode.CELERY_EAGER: | |
|
216 | meta.Session.remove() | |
|
217 | ||
|
218 | return decorator(__wrapper, func) | |
|
219 | ||
|
220 | ||
|
221 | def vcsconnection(func): | |
|
222 | def __wrapper(func, *fargs, **fkwargs): | |
|
223 | if rhodecode.CELERY_ENABLED and not rhodecode.CELERY_EAGER: | |
|
224 | settings = rhodecode.PYRAMID_SETTINGS | |
|
225 | backends = settings['vcs.backends'] | |
|
226 | for alias in rhodecode.BACKENDS.keys(): | |
|
227 | if alias not in backends: | |
|
228 | del rhodecode.BACKENDS[alias] | |
|
229 | utils.configure_vcs(settings) | |
|
230 | connect_vcs( | |
|
231 | settings['vcs.server'], | |
|
232 | utils.get_vcs_server_protocol(settings)) | |
|
233 | ret = func(*fargs, **fkwargs) | |
|
234 | return ret | |
|
235 | ||
|
236 | return decorator(__wrapper, func) | |
|
72 | return ResultWrapper(task(*args, **kwargs)) |
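A hedged usage sketch (the tasks module path below is assumed): `run_task` returns a celery `AsyncResult` when a worker is reachable, and a `ResultWrapper` around the inline return value otherwise, so callers can read `.result` in both cases::

    from rhodecode.lib.celerylib import run_task
    from rhodecode.lib.celerylib.tasks import send_email  # assumed module path

    result = run_task(
        send_email, ['admin@example.com'], 'Test subject', 'Test body')
    print(result.result)  # True/False from send_email, once available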
@@ -1,293 +1,275 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | RhodeCode task modules, containing all tasks that are supposed to be run |
|
23 | 23 | by the celery daemon |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | ||
|
27 | 26 | import os |
|
28 | import logging | |
|
29 | ||
|
30 | from celery.task import task | |
|
31 | 27 | |
|
32 | 28 | import rhodecode |
|
33 | 29 | from rhodecode.lib import audit_logger |
|
34 | from rhodecode.lib.celerylib import ( | 
|
35 | run_task, dbsession, __get_lockkey, LockHeld, DaemonLock, | |
|
36 | get_session, vcsconnection, RhodecodeCeleryTask) | |
|
30 | from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask | |
|
37 | 31 | from rhodecode.lib.hooks_base import log_create_repository |
|
38 | 32 | from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer |
|
39 | from rhodecode.lib.utils import add_cache | |
|
40 | 33 | from rhodecode.lib.utils2 import safe_int, str2bool |
|
41 | from rhodecode.model.db import Repository, User | |
|
34 | from rhodecode.model.db import Session, Repository, User | |
|
42 | 35 | |
|
43 | 36 | |
|
44 | def get_logger(cls): | |
|
45 | if rhodecode.CELERY_ENABLED: | |
|
46 | try: | |
|
47 | log = cls.get_logger() | |
|
48 | except Exception: | |
|
49 | log = logging.getLogger(__name__) | |
|
50 | else: | |
|
51 | log = logging.getLogger(__name__) | |
|
52 | ||
|
53 | return log | |
|
54 | ||
|
55 | ||
|
56 | @task(ignore_result=True, base=RhodecodeCeleryTask) | |
|
57 | @dbsession | |
|
37 | @async_task(ignore_result=True, base=RequestContextTask) | |
|
58 | 38 | def send_email(recipients, subject, body='', html_body='', email_config=None): |
|
59 | 39 | """ |
|
60 | 40 | Sends an email with defined parameters from the .ini files. |
|
61 | 41 | |
|
62 | 42 | :param recipients: list of recipients, it this is empty the defined email |
|
63 | 43 | address from field 'email_to' is used instead |
|
64 | 44 | :param subject: subject of the mail |
|
65 | 45 | :param body: body of the mail |
|
66 | 46 | :param html_body: html version of body |
|
67 | 47 | """ |
|
68 | 48 | log = get_logger(send_email) |
|
69 | 49 | |
|
70 | 50 | email_config = email_config or rhodecode.CONFIG |
|
71 | 51 | subject = "%s %s" % (email_config.get('email_prefix', ''), subject) |
|
72 | 52 | if not recipients: |
|
73 | 53 | # if recipients are not defined we send to email_config + all admins |
|
74 | 54 | admins = [ |
|
75 | 55 | u.email for u in User.query().filter(User.admin == True).all()] |
|
76 | 56 | recipients = [email_config.get('email_to')] + admins |
|
77 | 57 | |
|
78 | 58 | mail_server = email_config.get('smtp_server') or None |
|
79 | 59 | if mail_server is None: |
|
80 | 60 | log.error("SMTP server information missing. Sending email failed. " |
|
81 | 61 | "Make sure that `smtp_server` variable is configured " |
|
82 | 62 | "inside the .ini file") |
|
83 | 63 | return False |
|
84 | 64 | |
|
85 | 65 | mail_from = email_config.get('app_email_from', 'RhodeCode') |
|
86 | 66 | user = email_config.get('smtp_username') |
|
87 | 67 | passwd = email_config.get('smtp_password') |
|
88 | 68 | mail_port = email_config.get('smtp_port') |
|
89 | 69 | tls = str2bool(email_config.get('smtp_use_tls')) |
|
90 | 70 | ssl = str2bool(email_config.get('smtp_use_ssl')) |
|
91 | 71 | debug = str2bool(email_config.get('debug')) |
|
92 | 72 | smtp_auth = email_config.get('smtp_auth') |
|
93 | 73 | |
|
94 | 74 | try: |
|
95 | 75 | m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth, |
|
96 | 76 | mail_port, ssl, tls, debug=debug) |
|
97 | 77 | m.send(recipients, subject, body, html_body) |
|
98 | 78 | except Exception: |
|
99 | 79 | log.exception('Mail sending failed') |
|
100 | 80 | return False |
|
101 | 81 | return True |
|
102 | 82 | |
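For reference, a sketch of driving `SmtpMailer` directly with hypothetical SMTP settings, mirroring the positional call above::

    from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer

    mailer = SmtpMailer(
        'rhodecode@example.com',        # mail_from
        'smtp-user', 'smtp-secret',     # smtp username / password
        'smtp.example.com', None,       # mail server / smtp_auth
        '587', False, True,             # port / use_ssl / use_tls
        debug=False)
    mailer.send(
        ['admin@example.com'], 'Test subject',
        'plain text body', '<b>html body</b>')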
|
103 | 83 | |
|
104 | @task(ignore_result=True, base=RhodecodeCeleryTask) | 
|
105 | @dbsession | |
|
106 | @vcsconnection | |
|
84 | @async_task(ignore_result=True, base=RequestContextTask) | |
|
107 | 85 | def create_repo(form_data, cur_user): |
|
108 | 86 | from rhodecode.model.repo import RepoModel |
|
109 | 87 | from rhodecode.model.user import UserModel |
|
110 | 88 | from rhodecode.model.settings import SettingsModel |
|
111 | 89 | |
|
112 | 90 | log = get_logger(create_repo) |
|
113 | DBS = get_session() | |
|
114 | 91 | |
|
115 | cur_user = UserModel(DBS)._get_user(cur_user) | 
|
92 | cur_user = UserModel()._get_user(cur_user) | |
|
116 | 93 | owner = cur_user |
|
117 | 94 | |
|
118 | 95 | repo_name = form_data['repo_name'] |
|
119 | 96 | repo_name_full = form_data['repo_name_full'] |
|
120 | 97 | repo_type = form_data['repo_type'] |
|
121 | 98 | description = form_data['repo_description'] |
|
122 | 99 | private = form_data['repo_private'] |
|
123 | 100 | clone_uri = form_data.get('clone_uri') |
|
124 | 101 | repo_group = safe_int(form_data['repo_group']) |
|
125 | 102 | landing_rev = form_data['repo_landing_rev'] |
|
126 | 103 | copy_fork_permissions = form_data.get('copy_permissions') |
|
127 | 104 | copy_group_permissions = form_data.get('repo_copy_permissions') |
|
128 | 105 | fork_of = form_data.get('fork_parent_id') |
|
129 | 106 | state = form_data.get('repo_state', Repository.STATE_PENDING) |
|
130 | 107 | |
|
131 | 108 | # repo creation defaults, private and repo_type are filled in form |
|
132 | 109 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
133 | 110 | enable_statistics = form_data.get( |
|
134 | 111 | 'enable_statistics', defs.get('repo_enable_statistics')) |
|
135 | 112 | enable_locking = form_data.get( |
|
136 | 113 | 'enable_locking', defs.get('repo_enable_locking')) |
|
137 | 114 | enable_downloads = form_data.get( |
|
138 | 115 | 'enable_downloads', defs.get('repo_enable_downloads')) |
|
139 | 116 | |
|
140 | 117 | try: |
|
141 | repo = RepoModel(DBS)._create_repo( | 
|
118 | repo = RepoModel()._create_repo( | |
|
142 | 119 | repo_name=repo_name_full, |
|
143 | 120 | repo_type=repo_type, |
|
144 | 121 | description=description, |
|
145 | 122 | owner=owner, |
|
146 | 123 | private=private, |
|
147 | 124 | clone_uri=clone_uri, |
|
148 | 125 | repo_group=repo_group, |
|
149 | 126 | landing_rev=landing_rev, |
|
150 | 127 | fork_of=fork_of, |
|
151 | 128 | copy_fork_permissions=copy_fork_permissions, |
|
152 | 129 | copy_group_permissions=copy_group_permissions, |
|
153 | 130 | enable_statistics=enable_statistics, |
|
154 | 131 | enable_locking=enable_locking, |
|
155 | 132 | enable_downloads=enable_downloads, |
|
156 | 133 | state=state |
|
157 | 134 | ) |
|
158 | DBS.commit() | 
|
135 | Session().commit() | |
|
159 | 136 | |
|
160 | 137 | # now create this repo on Filesystem |
|
161 | RepoModel(DBS)._create_filesystem_repo( | 
|
138 | RepoModel()._create_filesystem_repo( | |
|
162 | 139 | repo_name=repo_name, |
|
163 | 140 | repo_type=repo_type, |
|
164 | repo_group=RepoModel(DBS)._get_repo_group(repo_group), | 
|
141 | repo_group=RepoModel()._get_repo_group(repo_group), | |
|
165 | 142 | clone_uri=clone_uri, |
|
166 | 143 | ) |
|
167 | 144 | repo = Repository.get_by_repo_name(repo_name_full) |
|
168 | 145 | log_create_repository(created_by=owner.username, **repo.get_dict()) |
|
169 | 146 | |
|
170 | 147 | # update repo commit caches initially |
|
171 | 148 | repo.update_commit_cache() |
|
172 | 149 | |
|
173 | 150 | # set new created state |
|
174 | 151 | repo.set_state(Repository.STATE_CREATED) |
|
175 | 152 | repo_id = repo.repo_id |
|
176 | 153 | repo_data = repo.get_api_data() |
|
177 | 154 | |
|
178 | 155 | audit_logger.store( |
|
179 | 156 | 'repo.create', action_data={'data': repo_data}, |
|
180 | 157 | user=cur_user, |
|
181 | 158 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) |
|
182 | 159 | |
|
183 | DBS.commit() | 
|
160 | Session().commit() | |
|
184 | 161 | except Exception: |
|
185 | 162 | log.warning('Exception occurred when creating repository, ' |
|
186 | 163 | 'doing cleanup...', exc_info=True) |
|
187 | 164 | # rollback things manually ! |
|
188 | 165 | repo = Repository.get_by_repo_name(repo_name_full) |
|
189 | 166 | if repo: |
|
190 | 167 | Repository.delete(repo.repo_id) |
|
191 | DBS.commit() | 
|
192 | RepoModel(DBS)._delete_filesystem_repo(repo) | 
|
168 | Session().commit() | |
|
169 | RepoModel()._delete_filesystem_repo(repo) | |
|
193 | 170 | raise |
|
194 | 171 | |
|
195 | 172 | # it's an odd fix to make celery fail task when exception occurs |
|
196 | 173 | def on_failure(self, *args, **kwargs): |
|
197 | 174 | pass |
|
198 | 175 | |
|
199 | 176 | return True |
|
200 | 177 | |
|
201 | 178 | |
|
202 |
@task(ignore_result=True, base=R |
|
|
203 | @dbsession | |
|
204 | @vcsconnection | |
|
179 | @async_task(ignore_result=True, base=RequestContextTask) | |
|
205 | 180 | def create_repo_fork(form_data, cur_user): |
|
206 | 181 | """ |
|
207 | 182 | Creates a fork of repository using internal VCS methods |
|
208 | ||
|
209 | :param form_data: | |
|
210 | :param cur_user: | |
|
211 | 183 | """ |
|
212 | 184 | from rhodecode.model.repo import RepoModel |
|
213 | 185 | from rhodecode.model.user import UserModel |
|
214 | 186 | |
|
215 | 187 | log = get_logger(create_repo_fork) |
|
216 | DBS = get_session() | |
|
217 | 188 | |
|
218 | cur_user = UserModel(DBS)._get_user(cur_user) | 
|
189 | cur_user = UserModel()._get_user(cur_user) | |
|
219 | 190 | owner = cur_user |
|
220 | 191 | |
|
221 | 192 | repo_name = form_data['repo_name'] # fork in this case |
|
222 | 193 | repo_name_full = form_data['repo_name_full'] |
|
223 | 194 | repo_type = form_data['repo_type'] |
|
224 | 195 | description = form_data['description'] |
|
225 | 196 | private = form_data['private'] |
|
226 | 197 | clone_uri = form_data.get('clone_uri') |
|
227 | 198 | repo_group = safe_int(form_data['repo_group']) |
|
228 | 199 | landing_rev = form_data['landing_rev'] |
|
229 | 200 | copy_fork_permissions = form_data.get('copy_permissions') |
|
230 | 201 | fork_id = safe_int(form_data.get('fork_parent_id')) |
|
231 | 202 | |
|
232 | 203 | try: |
|
233 | fork_of = RepoModel(DBS)._get_repo(fork_id) | 
|
234 | RepoModel(DBS)._create_repo( | 
|
204 | fork_of = RepoModel()._get_repo(fork_id) | |
|
205 | RepoModel()._create_repo( | |
|
235 | 206 | repo_name=repo_name_full, |
|
236 | 207 | repo_type=repo_type, |
|
237 | 208 | description=description, |
|
238 | 209 | owner=owner, |
|
239 | 210 | private=private, |
|
240 | 211 | clone_uri=clone_uri, |
|
241 | 212 | repo_group=repo_group, |
|
242 | 213 | landing_rev=landing_rev, |
|
243 | 214 | fork_of=fork_of, |
|
244 | 215 | copy_fork_permissions=copy_fork_permissions |
|
245 | 216 | ) |
|
246 | 217 | |
|
247 | DBS.commit() | 
|
218 | Session().commit() | |
|
248 | 219 | |
|
249 | 220 | base_path = Repository.base_path() |
|
250 | 221 | source_repo_path = os.path.join(base_path, fork_of.repo_name) |
|
251 | 222 | |
|
252 | 223 | # now create this repo on Filesystem |
|
253 | RepoModel(DBS)._create_filesystem_repo( | 
|
224 | RepoModel()._create_filesystem_repo( | |
|
254 | 225 | repo_name=repo_name, |
|
255 | 226 | repo_type=repo_type, |
|
256 | repo_group=RepoModel(DBS)._get_repo_group(repo_group), | 
|
227 | repo_group=RepoModel()._get_repo_group(repo_group), | |
|
257 | 228 | clone_uri=source_repo_path, |
|
258 | 229 | ) |
|
259 | 230 | repo = Repository.get_by_repo_name(repo_name_full) |
|
260 | 231 | log_create_repository(created_by=owner.username, **repo.get_dict()) |
|
261 | 232 | |
|
262 | 233 | # update repo commit caches initially |
|
263 | 234 | config = repo._config |
|
264 | 235 | config.set('extensions', 'largefiles', '') |
|
265 | 236 | repo.update_commit_cache(config=config) |
|
266 | 237 | |
|
267 | 238 | # set new created state |
|
268 | 239 | repo.set_state(Repository.STATE_CREATED) |
|
269 | 240 | |
|
270 | 241 | repo_id = repo.repo_id |
|
271 | 242 | repo_data = repo.get_api_data() |
|
272 | 243 | audit_logger.store( |
|
273 | 244 | 'repo.fork', action_data={'data': repo_data}, |
|
274 | 245 | user=cur_user, |
|
275 | 246 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) |
|
276 | 247 | |
|
277 | DBS.commit() | 
|
248 | Session().commit() | |
|
278 | 249 | except Exception as e: |
|
279 | 250 | log.warning('Exception %s occurred when forking repository, ' |
|
280 | 251 | 'doing cleanup...', e) |
|
281 | 252 | # rollback things manually ! |
|
282 | 253 | repo = Repository.get_by_repo_name(repo_name_full) |
|
283 | 254 | if repo: |
|
284 | 255 | Repository.delete(repo.repo_id) |
|
285 | DBS.commit() | 
|
286 | RepoModel(DBS)._delete_filesystem_repo(repo) | 
|
256 | Session().commit() | |
|
257 | RepoModel()._delete_filesystem_repo(repo) | |
|
287 | 258 | raise |
|
288 | 259 | |
|
289 | 260 | # it's an odd fix to make celery fail task when exception occurs |
|
290 | 261 | def on_failure(self, *args, **kwargs): |
|
291 | 262 | pass |
|
292 | 263 | |
|
293 | 264 | return True |
|
265 | ||
|
266 | ||
|
267 | @async_task(ignore_result=True) | |
|
268 | def sync_repo(*args, **kwargs): | |
|
269 | from rhodecode.model.scm import ScmModel | |
|
270 | log = get_logger(sync_repo) | |
|
271 | ||
|
272 | log.info('Pulling from %s', kwargs['repo_name']) | |
|
273 | ScmModel().pull_changes(kwargs['repo_name'], kwargs['username']) | |
|
274 | ||
|
275 |
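Because `sync_repo` reads its inputs from `**kwargs`, it has to be invoked with keyword arguments, for example (hypothetical repo and user)::

    from rhodecode.lib.celerylib import run_task

    run_task(sync_repo, repo_name='my-repo', username='admin')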
@@ -1,49 +1,56 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | from pyramid.compat import configparser |
|
23 | 23 | from pyramid.paster import bootstrap as pyramid_bootstrap, setup_logging # noqa |
|
24 | 24 | from pyramid.request import Request |
|
25 | from pyramid.scripting import prepare | |
|
25 | 26 | |
|
26 | 27 | |
|
27 | 28 | def get_config(ini_path, **kwargs): |
|
28 | 29 | parser = configparser.ConfigParser(**kwargs) |
|
29 | 30 | parser.read(ini_path) |
|
30 | 31 | return parser |
|
31 | 32 | |
|
32 | 33 | |
|
33 | 34 | def get_app_config(ini_path): |
|
34 | 35 | from paste.deploy.loadwsgi import appconfig |
|
35 | 36 | return appconfig('config:{}'.format(ini_path), relative_to=os.getcwd()) |
|
36 | 37 | |
|
37 | 38 | |
|
38 | 39 | def bootstrap(config_uri, request=None, options=None): |
|
39 | 40 | |
|
40 | 41 | config = get_config(config_uri) |
|
41 | 42 | base_url = 'http://rhodecode.local' |
|
42 | 43 | try: |
|
43 | 44 | base_url = config.get('app:main', 'app.base_url') |
|
44 | 45 | except (configparser.NoSectionError, configparser.NoOptionError): |
|
45 | 46 | pass |
|
46 | 47 | |
|
47 | 48 | request = request or Request.blank('/', base_url=base_url) |
|
48 | 49 | |
|
49 | 50 | return pyramid_bootstrap(config_uri, request=request, options=options) |
|
51 | ||
|
52 | ||
|
53 | def prepare_request(environ): | |
|
54 | request = Request.blank('/', environ=environ) | |
|
55 | prepare(request) # set pyramid threadlocal request | |
|
56 | return request |
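Hedged usage sketches for the two helpers, assuming an ini path and that a Pyramid registry is already configured for `prepare()`::

    # script-style usage: build a full app environment from an ini file
    env = bootstrap('._dev/dev.ini')
    request = env['request']
    print(request.application_url)  # http://rhodecode.local unless app.base_url says otherwise
    env['closer']()

    # worker-style usage: replay a captured WSGI environ as the threadlocal request
    request = prepare_request({
        'PATH_INFO': '/', 'SCRIPT_NAME': '',
        'SERVER_NAME': 'rhodecode.local', 'SERVER_PORT': '80',
        'wsgi.url_scheme': 'http',
    })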
@@ -1,882 +1,802 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Utilities library for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import datetime |
|
26 | 26 | import decorator |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import os |
|
30 | 30 | import re |
|
31 | 31 | import shutil |
|
32 | 32 | import tempfile |
|
33 | 33 | import traceback |
|
34 | 34 | import tarfile |
|
35 | 35 | import warnings |
|
36 | 36 | import hashlib |
|
37 | 37 | from os.path import join as jn |
|
38 | 38 | |
|
39 | 39 | import paste |
|
40 | 40 | import pkg_resources |
|
41 | from paste.script.command import Command, BadCommand | |
|
42 | 41 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
43 | 42 | from mako import exceptions |
|
44 | 43 | from pyramid.threadlocal import get_current_registry |
|
45 | 44 | from pyramid.request import Request |
|
46 | 45 | |
|
47 | 46 | from rhodecode.lib.fakemod import create_module |
|
48 | 47 | from rhodecode.lib.vcs.backends.base import Config |
|
49 | 48 | from rhodecode.lib.vcs.exceptions import VCSError |
|
50 | 49 | from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend |
|
51 | 50 | from rhodecode.lib.utils2 import ( |
|
52 | 51 | safe_str, safe_unicode, get_current_rhodecode_user, md5) |
|
53 | 52 | from rhodecode.model import meta |
|
54 | 53 | from rhodecode.model.db import ( |
|
55 | 54 | Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup) |
|
56 | 55 | from rhodecode.model.meta import Session |
|
57 | 56 | |
|
58 | 57 | |
|
59 | 58 | log = logging.getLogger(__name__) |
|
60 | 59 | |
|
61 | 60 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*') |
|
62 | 61 | |
|
63 | 62 | # String which contains characters that are not allowed in slug names for |
|
64 | 63 | # repositories or repository groups. It is properly escaped to use it in |
|
65 | 64 | # regular expressions. |
|
66 | 65 | SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:') |
|
67 | 66 | |
|
68 | 67 | # Regex that matches forbidden characters in repo/group slugs. |
|
69 | 68 | SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS)) |
|
70 | 69 | |
|
71 | 70 | # Regex that matches allowed characters in repo/group slugs. |
|
72 | 71 | SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS)) |
|
73 | 72 | |
|
74 | 73 | # Regex that matches whole repo/group slugs. |
|
75 | 74 | SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS)) |
|
76 | 75 | |
|
77 | 76 | _license_cache = None |
|
78 | 77 | |
|
79 | 78 | |
|
80 | 79 | def repo_name_slug(value): |
|
81 | 80 | """ |
|
82 | 81 | Return slug of name of repository |
|
83 | 82 | This function is called on each creation/modification |
|
84 | 83 | of repository to prevent bad names in repo |
|
85 | 84 | """ |
|
86 | 85 | replacement_char = '-' |
|
87 | 86 | |
|
88 | 87 | slug = remove_formatting(value) |
|
89 | 88 | slug = SLUG_BAD_CHAR_RE.sub('', slug) |
|
90 | 89 | slug = re.sub('[\s]+', '-', slug) |
|
91 | 90 | slug = collapse(slug, replacement_char) |
|
92 | 91 | return slug |
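Illustrative behaviour (outputs approximate, given the `webhelpers` helpers involved)::

    repo_name_slug('My Repo!!')    # -> 'My-Repo'   (bad chars dropped, spaces dashed)
    repo_name_slug('repo   name')  # -> 'repo-name' (whitespace runs collapsed)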
|
93 | 92 | |
|
94 | 93 | |
|
95 | 94 | #============================================================================== |
|
96 | 95 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS |
|
97 | 96 | #============================================================================== |
|
98 | 97 | def get_repo_slug(request): |
|
99 | 98 | _repo = '' |
|
100 | 99 | |
|
101 | 100 | if hasattr(request, 'db_repo'): |
|
102 | 101 | # if our requests has set db reference use it for name, this |
|
103 | 102 | # translates the example.com/_<id> into proper repo names |
|
104 | 103 | _repo = request.db_repo.repo_name |
|
105 | 104 | elif getattr(request, 'matchdict', None): |
|
106 | 105 | # pyramid |
|
107 | 106 | _repo = request.matchdict.get('repo_name') |
|
108 | 107 | |
|
109 | 108 | if _repo: |
|
110 | 109 | _repo = _repo.rstrip('/') |
|
111 | 110 | return _repo |
|
112 | 111 | |
|
113 | 112 | |
|
114 | 113 | def get_repo_group_slug(request): |
|
115 | 114 | _group = '' |
|
116 | 115 | if hasattr(request, 'db_repo_group'): |
|
117 | 116 | # if our requests has set db reference use it for name, this |
|
118 | 117 | # translates the example.com/_<id> into proper repo group names |
|
119 | 118 | _group = request.db_repo_group.group_name |
|
120 | 119 | elif getattr(request, 'matchdict', None): |
|
121 | 120 | # pyramid |
|
122 | 121 | _group = request.matchdict.get('repo_group_name') |
|
123 | 122 | |
|
124 | 123 | |
|
125 | 124 | if _group: |
|
126 | 125 | _group = _group.rstrip('/') |
|
127 | 126 | return _group |
|
128 | 127 | |
|
129 | 128 | |
|
130 | 129 | def get_user_group_slug(request): |
|
131 | 130 | _user_group = '' |
|
132 | 131 | |
|
133 | 132 | if hasattr(request, 'db_user_group'): |
|
134 | 133 | _user_group = request.db_user_group.users_group_name |
|
135 | 134 | elif getattr(request, 'matchdict', None): |
|
136 | 135 | # pyramid |
|
137 | 136 | _user_group = request.matchdict.get('user_group_id') |
|
138 | 137 | |
|
139 | 138 | try: |
|
140 | 139 | _user_group = UserGroup.get(_user_group) |
|
141 | 140 | if _user_group: |
|
142 | 141 | _user_group = _user_group.users_group_name |
|
143 | 142 | except Exception: |
|
144 | 143 | log.exception('Failed to get user group by id') |
|
145 | 144 | # catch all failures here |
|
146 | 145 | return None |
|
147 | 146 | |
|
148 | 147 | return _user_group |
|
149 | 148 | |
|
150 | 149 | |
|
151 | 150 | def get_filesystem_repos(path, recursive=False, skip_removed_repos=True): |
|
152 | 151 | """ |
|
153 | 152 | Scans given path for repos and return (name,(type,path)) tuple |
|
154 | 153 | |
|
155 | 154 | :param path: path to scan for repositories |
|
156 | 155 | :param recursive: recursive search and return names with subdirs in front |
|
157 | 156 | """ |
|
158 | 157 | |
|
159 | 158 | # remove ending slash for better results |
|
160 | 159 | path = path.rstrip(os.sep) |
|
161 | 160 | log.debug('now scanning in %s location recursive:%s...', path, recursive) |
|
162 | 161 | |
|
163 | 162 | def _get_repos(p): |
|
164 | 163 | dirpaths = _get_dirpaths(p) |
|
165 | 164 | if not _is_dir_writable(p): |
|
166 | 165 | log.warning('repo path without write access: %s', p) |
|
167 | 166 | |
|
168 | 167 | for dirpath in dirpaths: |
|
169 | 168 | if os.path.isfile(os.path.join(p, dirpath)): |
|
170 | 169 | continue |
|
171 | 170 | cur_path = os.path.join(p, dirpath) |
|
172 | 171 | |
|
173 | 172 | # skip removed repos |
|
174 | 173 | if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath): |
|
175 | 174 | continue |
|
176 | 175 | |
|
177 | 176 | # skip .<something> dirs |
|
178 | 177 | if dirpath.startswith('.'): |
|
179 | 178 | continue |
|
180 | 179 | |
|
181 | 180 | try: |
|
182 | 181 | scm_info = get_scm(cur_path) |
|
183 | 182 | yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info |
|
184 | 183 | except VCSError: |
|
185 | 184 | if not recursive: |
|
186 | 185 | continue |
|
187 | 186 | # check if this dir contains other repos for recursive scan |
|
188 | 187 | rec_path = os.path.join(p, dirpath) |
|
189 | 188 | if os.path.isdir(rec_path): |
|
190 | 189 | for inner_scm in _get_repos(rec_path): |
|
191 | 190 | yield inner_scm |
|
192 | 191 | |
|
193 | 192 | return _get_repos(path) |
|
194 | 193 | |
|
195 | 194 | |
|
196 | 195 | def _get_dirpaths(p): |
|
197 | 196 | try: |
|
198 | 197 | # OS-independent way of checking if we have at least read-only |
|
199 | 198 | # access or not. |
|
200 | 199 | dirpaths = os.listdir(p) |
|
201 | 200 | except OSError: |
|
202 | 201 | log.warning('ignoring repo path without read access: %s', p) |
|
203 | 202 | return [] |
|
204 | 203 | |
|
205 | 204 | # os.listdir has a tweak: If a unicode is passed into it, then it tries to |
|
206 | 205 | # decode paths and suddenly returns unicode objects itself. The items it |
|
207 | 206 | # cannot decode are returned as strings and cause issues. |
|
208 | 207 | # |
|
209 | 208 | # Those paths are ignored here until a solid solution for path handling has |
|
210 | 209 | # been built. |
|
211 | 210 | expected_type = type(p) |
|
212 | 211 | |
|
213 | 212 | def _has_correct_type(item): |
|
214 | 213 | if type(item) is not expected_type: |
|
215 | 214 | log.error( |
|
216 | 215 | u"Ignoring path %s since it cannot be decoded into unicode.", |
|
217 | 216 | # Using "repr" to make sure that we see the byte value in case |
|
218 | 217 | # of support. |
|
219 | 218 | repr(item)) |
|
220 | 219 | return False |
|
221 | 220 | return True |
|
222 | 221 | |
|
223 | 222 | dirpaths = [item for item in dirpaths if _has_correct_type(item)] |
|
224 | 223 | |
|
225 | 224 | return dirpaths |
|
226 | 225 | |
|
227 | 226 | |
|
228 | 227 | def _is_dir_writable(path): |
|
229 | 228 | """ |
|
230 | 229 | Probe if `path` is writable. |
|
231 | 230 | |
|
232 | 231 | Due to trouble on Cygwin / Windows, this is actually probing if it is |
|
233 | 232 | possible to create a file inside of `path`, stat does not produce reliable |
|
234 | 233 | results in this case. |
|
235 | 234 | """ |
|
236 | 235 | try: |
|
237 | 236 | with tempfile.TemporaryFile(dir=path): |
|
238 | 237 | pass |
|
239 | 238 | except OSError: |
|
240 | 239 | return False |
|
241 | 240 | return True |
|
242 | 241 | |
|
243 | 242 | |
|
244 | 243 | def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None): |
|
245 | 244 | """ |
|
246 | 245 | Returns True if given path is a valid repository False otherwise. |
|
247 | 246 | If expect_scm param is given also, compare if given scm is the same |
|
248 | 247 | as expected from scm parameter. If explicit_scm is given don't try to |
|
249 | 248 | detect the scm, just use the given one to check if repo is valid |
|
250 | 249 | |
|
251 | 250 | :param repo_name: |
|
252 | 251 | :param base_path: |
|
253 | 252 | :param expect_scm: |
|
254 | 253 | :param explicit_scm: |
|
255 | 254 | |
|
256 | 255 | :return True: if given path is a valid repository |
|
257 | 256 | """ |
|
258 | 257 | full_path = os.path.join(safe_str(base_path), safe_str(repo_name)) |
|
259 | 258 | log.debug('Checking if `%s` is a valid path for repository. ' |
|
260 | 259 | 'Explicit type: %s', repo_name, explicit_scm) |
|
261 | 260 | |
|
262 | 261 | try: |
|
263 | 262 | if explicit_scm: |
|
264 | 263 | detected_scms = [get_scm_backend(explicit_scm)] |
|
265 | 264 | else: |
|
266 | 265 | detected_scms = get_scm(full_path) |
|
267 | 266 | |
|
268 | 267 | if expect_scm: |
|
269 | 268 | return detected_scms[0] == expect_scm |
|
270 | 269 | log.debug('path: %s is a vcs object:%s', full_path, detected_scms) |
|
271 | 270 | return True |
|
272 | 271 | except VCSError: |
|
273 | 272 | log.debug('path: %s is not a valid repo !', full_path) |
|
274 | 273 | return False |
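Usage sketch with hypothetical paths::

    if is_valid_repo('my-repo', '/srv/repos', expect_scm='git'):
        print('my-repo exists under /srv/repos and is a git repository')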
|
275 | 274 | |
|
276 | 275 | |
|
277 | 276 | def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False): |
|
278 | 277 | """ |
|
279 | 278 | Returns True if given path is a repository group, False otherwise |
|
280 | 279 | |
|
281 | 280 | :param repo_name: |
|
282 | 281 | :param base_path: |
|
283 | 282 | """ |
|
284 | 283 | full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name)) |
|
285 | 284 | log.debug('Checking if `%s` is a valid path for repository group', |
|
286 | 285 | repo_group_name) |
|
287 | 286 | |
|
288 | 287 | # check if it's not a repo |
|
289 | 288 | if is_valid_repo(repo_group_name, base_path): |
|
290 | 289 | log.debug('Repo called %s exist, it is not a valid ' |
|
291 | 290 | 'repo group' % repo_group_name) |
|
292 | 291 | return False |
|
293 | 292 | |
|
294 | 293 | try: |
|
295 | 294 | # we need to check bare git repos at higher level |
|
296 | 295 | # since we might match branches/hooks/info/objects or possible |
|
297 | 296 | # other things inside bare git repo |
|
298 | 297 | scm_ = get_scm(os.path.dirname(full_path)) |
|
299 | 298 | log.debug('path: %s is a vcs object: %s, not a valid '

300 | 299 | 'repo group', full_path, scm_)
|
301 | 300 | return False |
|
302 | 301 | except VCSError: |
|
303 | 302 | pass |
|
304 | 303 | |
|
305 | 304 | # check if it's a valid path |
|
306 | 305 | if skip_path_check or os.path.isdir(full_path): |
|
307 | 306 | log.debug('path: %s is a valid repo group !', full_path) |
|
308 | 307 | return True |
|
309 | 308 | |
|
310 | 309 | log.debug('path: %s is not a valid repo group !', full_path) |
|
311 | 310 | return False |
|
312 | 311 | |
|
313 | 312 | |
|
314 | 313 | def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'): |
|
315 | 314 | while True: |
|
316 | 315 | ok = raw_input(prompt) |
|
317 | 316 | if ok.lower() in ('y', 'ye', 'yes'): |
|
318 | 317 | return True |
|
319 | 318 | if ok.lower() in ('n', 'no', 'nop', 'nope'): |
|
320 | 319 | return False |
|
321 | 320 | retries = retries - 1 |
|
322 | 321 | if retries < 0: |
|
323 | 322 | raise IOError |
|
324 | 323 | print(complaint) |
|
325 | 324 | |
|
326 | 325 | # taken from the mercurial documentation
|
327 | 326 | ui_sections = [ |
|
328 | 327 | 'alias', 'auth', |
|
329 | 328 | 'decode/encode', 'defaults', |
|
330 | 329 | 'diff', 'email', |
|
331 | 330 | 'extensions', 'format', |
|
332 | 331 | 'merge-patterns', 'merge-tools', |
|
333 | 332 | 'hooks', 'http_proxy', |
|
334 | 333 | 'smtp', 'patch', |
|
335 | 334 | 'paths', 'profiling', |
|
336 | 335 | 'server', 'trusted', |
|
337 | 336 | 'ui', 'web', ] |
|
338 | 337 | |
|
339 | 338 | |
|
340 | 339 | def config_data_from_db(clear_session=True, repo=None): |
|
341 | 340 | """ |
|
342 | 341 | Read the configuration data from the database and return configuration |
|
343 | 342 | tuples. |
|
344 | 343 | """ |
|
345 | 344 | from rhodecode.model.settings import VcsSettingsModel |
|
346 | 345 | |
|
347 | 346 | config = [] |
|
348 | 347 | |
|
349 | 348 | sa = meta.Session() |
|
350 | 349 | settings_model = VcsSettingsModel(repo=repo, sa=sa) |
|
351 | 350 | |
|
352 | 351 | ui_settings = settings_model.get_ui_settings() |
|
353 | 352 | |
|
354 | 353 | for setting in ui_settings: |
|
355 | 354 | if setting.active: |
|
356 | 355 | log.debug( |
|
357 | 356 | 'settings ui from db: [%s] %s=%s', |
|
358 | 357 | setting.section, setting.key, setting.value) |
|
359 | 358 | config.append(( |
|
360 | 359 | safe_str(setting.section), safe_str(setting.key), |
|
361 | 360 | safe_str(setting.value))) |
|
362 | 361 | if setting.key == 'push_ssl': |
|
363 | 362 | # force set push_ssl requirement to False, rhodecode |
|
364 | 363 | # handles that |
|
365 | 364 | config.append(( |
|
366 | 365 | safe_str(setting.section), safe_str(setting.key), False)) |
|
367 | 366 | if clear_session: |
|
368 | 367 | meta.Session.remove() |
|
369 | 368 | |
|
370 | 369 | # TODO: mikhail: probably it makes no sense to re-read hooks information. |
|
371 | 370 | # It's already there and activated/deactivated |
|
372 | 371 | skip_entries = [] |
|
373 | 372 | enabled_hook_classes = get_enabled_hook_classes(ui_settings) |
|
374 | 373 | if 'pull' not in enabled_hook_classes: |
|
375 | 374 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL)) |
|
376 | 375 | if 'push' not in enabled_hook_classes: |
|
377 | 376 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH)) |
|
378 | 377 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH)) |
|
379 | 378 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY)) |
|
380 | 379 | |
|
381 | 380 | config = [entry for entry in config if entry[:2] not in skip_entries] |
|
382 | 381 | |
|
383 | 382 | return config |
|
384 | 383 | |
|
385 | 384 | |
|
386 | 385 | def make_db_config(clear_session=True, repo=None): |
|
387 | 386 | """ |
|
388 | 387 | Create a :class:`Config` instance based on the values in the database. |
|
389 | 388 | """ |
|
390 | 389 | config = Config() |
|
391 | 390 | config_data = config_data_from_db(clear_session=clear_session, repo=repo) |
|
392 | 391 | for section, option, value in config_data: |
|
393 | 392 | config.set(section, option, value) |
|
394 | 393 | return config |
|
395 | 394 | |
|
396 | 395 | |
|
397 | 396 | def get_enabled_hook_classes(ui_settings): |
|
398 | 397 | """ |
|
399 | 398 | Return the enabled hook classes. |
|
400 | 399 | |
|
401 | 400 | :param ui_settings: List of ui_settings as returned |
|
402 | 401 | by :meth:`VcsSettingsModel.get_ui_settings` |
|
403 | 402 | |
|
404 | 403 | :return: a list with the enabled hook classes. The order is not guaranteed. |
|
405 | 404 | :rtype: list |
|
406 | 405 | """ |
|
407 | 406 | enabled_hooks = [] |
|
408 | 407 | active_hook_keys = [ |
|
409 | 408 | key for section, key, value, active in ui_settings |
|
410 | 409 | if section == 'hooks' and active] |
|
411 | 410 | |
|
412 | 411 | hook_names = { |
|
413 | 412 | RhodeCodeUi.HOOK_PUSH: 'push', |
|
414 | 413 | RhodeCodeUi.HOOK_PULL: 'pull', |
|
415 | 414 | RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size' |
|
416 | 415 | } |
|
417 | 416 | |
|
418 | 417 | for key in active_hook_keys: |
|
419 | 418 | hook = hook_names.get(key) |
|
420 | 419 | if hook: |
|
421 | 420 | enabled_hooks.append(hook) |
|
422 | 421 | |
|
423 | 422 | return enabled_hooks |
|
424 | 423 | |
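
# A sketch of the mapping above, assuming ui_settings rows unpack as
# (section, key, value, active) tuples: an active push hook is reported,
# a disabled repo_size hook is not.
#
#   >>> settings = [
#   ...     ('hooks', RhodeCodeUi.HOOK_PUSH, 'python:...', True),
#   ...     ('hooks', RhodeCodeUi.HOOK_REPO_SIZE, 'python:...', False)]
#   >>> get_enabled_hook_classes(settings)
#   ['push']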
|
425 | 424 | |
|
426 | 425 | def set_rhodecode_config(config): |
|
427 | 426 | """ |
|
428 | 427 | Updates pyramid config with new settings from database |
|
429 | 428 | |
|
430 | 429 | :param config: |
|
431 | 430 | """ |
|
432 | 431 | from rhodecode.model.settings import SettingsModel |
|
433 | 432 | app_settings = SettingsModel().get_all_settings() |
|
434 | 433 | |
|
435 | 434 | for k, v in app_settings.items(): |
|
436 | 435 | config[k] = v |
|
437 | 436 | |
|
438 | 437 | |
|
439 | 438 | def get_rhodecode_realm(): |
|
440 | 439 | """ |
|
441 | 440 | Return the rhodecode realm from database. |
|
442 | 441 | """ |
|
443 | 442 | from rhodecode.model.settings import SettingsModel |
|
444 | 443 | realm = SettingsModel().get_setting_by_name('realm') |
|
445 | 444 | return safe_str(realm.app_settings_value) |
|
446 | 445 | |
|
447 | 446 | |
|
448 | 447 | def get_rhodecode_base_path(): |
|
449 | 448 | """ |
|
450 | 449 | Returns the base path. The base path is the filesystem path which points |
|
451 | 450 | to the repository store. |
|
452 | 451 | """ |
|
453 | 452 | from rhodecode.model.settings import SettingsModel |
|
454 | 453 | paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/') |
|
455 | 454 | return safe_str(paths_ui.ui_value) |
|
456 | 455 | |
|
457 | 456 | |
|
458 | 457 | def map_groups(path): |
|
459 | 458 | """ |
|
460 | 459 | Given a full path to a repository, create all nested groups that this |
|
461 | 460 | repo is inside. This function creates parent-child relationships between |
|
462 | 461 | groups and creates default perms for all new groups. |
|
463 | 462 | |
|
464 | 463 | :param path: full path to repository
|
465 | 464 | """ |
|
466 | 465 | from rhodecode.model.repo_group import RepoGroupModel |
|
467 | 466 | sa = meta.Session() |
|
468 | 467 | groups = path.split(Repository.NAME_SEP) |
|
469 | 468 | parent = None |
|
470 | 469 | group = None |
|
471 | 470 | |
|
472 | 471 | # last element is repo in nested groups structure |
|
473 | 472 | groups = groups[:-1] |
|
474 | 473 | rgm = RepoGroupModel(sa) |
|
475 | 474 | owner = User.get_first_super_admin() |
|
476 | 475 | for lvl, group_name in enumerate(groups): |
|
477 | 476 | group_name = '/'.join(groups[:lvl] + [group_name]) |
|
478 | 477 | group = RepoGroup.get_by_group_name(group_name) |
|
479 | 478 | desc = '%s group' % group_name |
|
480 | 479 | |
|
481 | 480 | # skip folders that are now removed repos |
|
482 | 481 | if REMOVED_REPO_PAT.match(group_name): |
|
483 | 482 | break |
|
484 | 483 | |
|
485 | 484 | if group is None: |
|
486 | 485 | log.debug('creating group level: %s group_name: %s', |
|
487 | 486 | lvl, group_name) |
|
488 | 487 | group = RepoGroup(group_name, parent) |
|
489 | 488 | group.group_description = desc |
|
490 | 489 | group.user = owner |
|
491 | 490 | sa.add(group) |
|
492 | 491 | perm_obj = rgm._create_default_perms(group) |
|
493 | 492 | sa.add(perm_obj) |
|
494 | 493 | sa.flush() |
|
495 | 494 | |
|
496 | 495 | parent = group |
|
497 | 496 | return group |
|
498 | 497 | |
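
# Usage sketch with a hypothetical path: for 'projects/backend/my_repo'
# the function creates the missing groups 'projects' and
# 'projects/backend' (the trailing repo name is skipped) and returns the
# deepest RepoGroup.
#
#   >>> group = map_groups('projects/backend/my_repo')
#   >>> group.group_name
#   u'projects/backend'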
|
499 | 498 | |
|
500 | 499 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): |
|
501 | 500 | """ |
|
502 | 501 | Maps all repos given in initial_repo_list; non-existing repositories

503 | 502 | are created. If remove_obsolete is True it also checks for db entries

504 | 503 | that are not in initial_repo_list and removes them.
|
505 | 504 | |
|
506 | 505 | :param initial_repo_list: list of repositories found by scanning methods |
|
507 | 506 | :param remove_obsolete: check for obsolete entries in database |
|
508 | 507 | """ |
|
509 | 508 | from rhodecode.model.repo import RepoModel |
|
510 | 509 | from rhodecode.model.scm import ScmModel |
|
511 | 510 | from rhodecode.model.repo_group import RepoGroupModel |
|
512 | 511 | from rhodecode.model.settings import SettingsModel |
|
513 | 512 | |
|
514 | 513 | sa = meta.Session() |
|
515 | 514 | repo_model = RepoModel() |
|
516 | 515 | user = User.get_first_super_admin() |
|
517 | 516 | added = [] |
|
518 | 517 | |
|
519 | 518 | # creation defaults |
|
520 | 519 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
521 | 520 | enable_statistics = defs.get('repo_enable_statistics') |
|
522 | 521 | enable_locking = defs.get('repo_enable_locking') |
|
523 | 522 | enable_downloads = defs.get('repo_enable_downloads') |
|
524 | 523 | private = defs.get('repo_private') |
|
525 | 524 | |
|
526 | 525 | for name, repo in initial_repo_list.items(): |
|
527 | 526 | group = map_groups(name) |
|
528 | 527 | unicode_name = safe_unicode(name) |
|
529 | 528 | db_repo = repo_model.get_by_repo_name(unicode_name) |
|
530 | 529 | # found a repo that is on the filesystem but not in the RhodeCode database
|
531 | 530 | if not db_repo: |
|
532 | 531 | log.info('repository %s not found, creating now', name) |
|
533 | 532 | added.append(name) |
|
534 | 533 | desc = (repo.description |
|
535 | 534 | if repo.description != 'unknown' |
|
536 | 535 | else '%s repository' % name) |
|
537 | 536 | |
|
538 | 537 | db_repo = repo_model._create_repo( |
|
539 | 538 | repo_name=name, |
|
540 | 539 | repo_type=repo.alias, |
|
541 | 540 | description=desc, |
|
542 | 541 | repo_group=getattr(group, 'group_id', None), |
|
543 | 542 | owner=user, |
|
544 | 543 | enable_locking=enable_locking, |
|
545 | 544 | enable_downloads=enable_downloads, |
|
546 | 545 | enable_statistics=enable_statistics, |
|
547 | 546 | private=private, |
|
548 | 547 | state=Repository.STATE_CREATED |
|
549 | 548 | ) |
|
550 | 549 | sa.commit() |
|
551 | 550 | # we just added that repo, make sure the server info is updated
|
552 | 551 | if db_repo.repo_type == 'git': |
|
553 | 552 | git_repo = db_repo.scm_instance() |
|
554 | 553 | # update repository server-info |
|
555 | 554 | log.debug('Running update server info') |
|
556 | 555 | git_repo._update_server_info() |
|
557 | 556 | |
|
558 | 557 | db_repo.update_commit_cache() |
|
559 | 558 | |
|
560 | 559 | config = db_repo._config |
|
561 | 560 | config.set('extensions', 'largefiles', '') |
|
562 | 561 | ScmModel().install_hooks( |
|
563 | 562 | db_repo.scm_instance(config=config), |
|
564 | 563 | repo_type=db_repo.repo_type) |
|
565 | 564 | |
|
566 | 565 | removed = [] |
|
567 | 566 | if remove_obsolete: |
|
568 | 567 | # remove from database those repositories that are not in the filesystem |
|
569 | 568 | for repo in sa.query(Repository).all(): |
|
570 | 569 | if repo.repo_name not in initial_repo_list.keys(): |
|
571 | 570 | log.debug("Removing non-existing repository found in db `%s`", |
|
572 | 571 | repo.repo_name) |
|
573 | 572 | try: |
|
574 | 573 | RepoModel(sa).delete(repo, forks='detach', fs_remove=False) |
|
575 | 574 | sa.commit() |
|
576 | 575 | removed.append(repo.repo_name) |
|
577 | 576 | except Exception: |
|
578 | 577 | # don't hold further removals on error |
|
579 | 578 | log.error(traceback.format_exc()) |
|
580 | 579 | sa.rollback() |
|
581 | 580 | |
|
582 | 581 | def splitter(full_repo_name): |
|
583 | 582 | _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1) |
|
584 | 583 | gr_name = None |
|
585 | 584 | if len(_parts) == 2: |
|
586 | 585 | gr_name = _parts[0] |
|
587 | 586 | return gr_name |
|
588 | 587 | |
|
589 | 588 | initial_repo_group_list = [splitter(x) for x in |
|
590 | 589 | initial_repo_list.keys() if splitter(x)] |
|
591 | 590 | |
|
592 | 591 | # remove from database those repository groups that are not in the

593 | 592 | # filesystem. Due to parent-child relationships we need to delete them

594 | 593 | # in a specific order: most nested first.
|
595 | 594 | all_groups = [x.group_name for x in sa.query(RepoGroup).all()] |
|
596 | 595 | nested_sort = lambda gr: len(gr.split('/')) |
|
597 | 596 | for group_name in sorted(all_groups, key=nested_sort, reverse=True): |
|
598 | 597 | if group_name not in initial_repo_group_list: |
|
599 | 598 | repo_group = RepoGroup.get_by_group_name(group_name) |
|
600 | 599 | if (repo_group.children.all() or |
|
601 | 600 | not RepoGroupModel().check_exist_filesystem( |
|
602 | 601 | group_name=group_name, exc_on_failure=False)): |
|
603 | 602 | continue |
|
604 | 603 | |
|
605 | 604 | log.info( |
|
606 | 605 | 'Removing non-existing repository group found in db `%s`', |
|
607 | 606 | group_name) |
|
608 | 607 | try: |
|
609 | 608 | RepoGroupModel(sa).delete(group_name, fs_remove=False) |
|
610 | 609 | sa.commit() |
|
611 | 610 | removed.append(group_name) |
|
612 | 611 | except Exception: |
|
613 | 612 | # don't hold further removals on error |
|
614 | 613 | log.exception( |
|
615 | 614 | 'Unable to remove repository group `%s`', |
|
616 | 615 | group_name) |
|
617 | 616 | sa.rollback() |
|
618 | 617 | raise |
|
619 | 618 | |
|
620 | 619 | return added, removed |
|
621 | 620 | |
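
# Hedged usage sketch: initial_repo_list is typically the mapping
# produced by a filesystem scan, e.g. ScmModel().repo_scan(); with
# remove_obsolete=True, database entries without a backing directory are
# deleted (database records only, fs_remove=False keeps the filesystem).
#
#   >>> scanned = ScmModel().repo_scan(get_rhodecode_base_path())
#   >>> added, removed = repo2db_mapper(scanned, remove_obsolete=True)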
|
622 | 621 | |
|
623 | 622 | def get_default_cache_settings(settings): |
|
624 | 623 | cache_settings = {} |
|
625 | 624 | for key in settings.keys(): |
|
626 | 625 | for prefix in ['beaker.cache.', 'cache.']: |
|
627 | 626 | if key.startswith(prefix): |
|
628 | 627 | name = key.split(prefix)[1].strip() |
|
629 | 628 | cache_settings[name] = settings[key].strip() |
|
630 | 629 | return cache_settings |
|
631 | 630 | |
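
# Example of the prefix stripping above; both 'beaker.cache.' and
# 'cache.' prefixes map to the same bare names, unrelated keys are
# ignored (key order of the result may differ).
#
#   >>> get_default_cache_settings({
#   ...     'beaker.cache.sql_cache_short.expire': '30',
#   ...     'cache.regions': 'sql_cache_short',
#   ...     'sqlalchemy.db1.url': 'sqlite://'})
#   {'regions': 'sql_cache_short', 'sql_cache_short.expire': '30'}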
|
632 | 631 | |
|
633 | 632 | # set cache regions for beaker so celery can utilise it |
|
634 | 633 | def add_cache(settings): |
|
635 | 634 | from rhodecode.lib import caches |
|
636 | 635 | cache_settings = {'regions': None} |
|
637 | 636 | # main cache settings used as default ... |
|
638 | 637 | cache_settings.update(get_default_cache_settings(settings)) |
|
639 | 638 | |
|
640 | 639 | if cache_settings['regions']: |
|
641 | 640 | for region in cache_settings['regions'].split(','): |
|
642 | 641 | region = region.strip() |
|
643 | 642 | region_settings = {} |
|
644 | 643 | for key, value in cache_settings.items(): |
|
645 | 644 | if key.startswith(region): |
|
646 | 645 | region_settings[key.split('.')[1]] = value |
|
647 | 646 | |
|
648 | 647 | caches.configure_cache_region( |
|
649 | 648 | region, region_settings, cache_settings) |
|
650 | 649 | |
|
651 | 650 | |
|
652 | 651 | def load_rcextensions(root_path): |
|
653 | 652 | import rhodecode |
|
654 | 653 | from rhodecode.config import conf |
|
655 | 654 | |
|
656 | 655 | path = os.path.join(root_path, 'rcextensions', '__init__.py') |
|
657 | 656 | if os.path.isfile(path): |
|
658 | 657 | rcext = create_module('rc', path) |
|
659 | 658 | EXT = rhodecode.EXTENSIONS = rcext |
|
660 | 659 | log.debug('Found rcextensions, now loading %s...', rcext)
|
661 | 660 | |
|
662 | 661 | # Additional mappings that are not present in the pygments lexers |
|
663 | 662 | conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) |
|
664 | 663 | |
|
665 | 664 | # auto check if the module is not missing any data, set to default if it is;

666 | 665 | # this will help auto-update new features of the rcextensions module
|
667 | 666 | #from rhodecode.config import rcextensions |
|
668 | 667 | #for k in dir(rcextensions): |
|
669 | 668 | # if not k.startswith('_') and not hasattr(EXT, k): |
|
670 | 669 | # setattr(EXT, k, getattr(rcextensions, k)) |
|
671 | 670 | |
|
672 | 671 | |
|
673 | 672 | def get_custom_lexer(extension): |
|
674 | 673 | """ |
|
675 | 674 | returns a custom lexer if it is defined in rcextensions module, or None |
|
676 | 675 | if there's no custom lexer defined |
|
677 | 676 | """ |
|
678 | 677 | import rhodecode |
|
679 | 678 | from pygments import lexers |
|
680 | 679 | |
|
681 | 680 | # custom override made by RhodeCode |
|
682 | 681 | if extension in ['mako']: |
|
683 | 682 | return lexers.get_lexer_by_name('html+mako') |
|
684 | 683 | |
|
685 | 684 | # check if we defined this extension as another lexer
|
686 | 685 | extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None) |
|
687 | 686 | if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS: |
|
688 | 687 | _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension] |
|
689 | 688 | return lexers.get_lexer_by_name(_lexer_name) |
|
690 | 689 | |
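
# Usage sketch: 'mako' is the built-in override; other extensions are
# looked up in the optional rcextensions EXTRA_LEXERS mapping, and None
# is returned (implicitly) when nothing matches.
#
#   >>> get_custom_lexer('no-such-ext') is None
#   True
#   >>> get_custom_lexer('mako')   # doctest: +ELLIPSIS
#   <pygments.lexers...>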
|
691 | 690 | |
|
692 | 691 | #============================================================================== |
|
693 | 692 | # TEST FUNCTIONS AND CREATORS |
|
694 | 693 | #============================================================================== |
|
695 | 694 | def create_test_index(repo_location, config): |
|
696 | 695 | """ |
|
697 | 696 | Makes default test index. |
|
698 | 697 | """ |
|
699 | 698 | import rc_testdata |
|
700 | 699 | |
|
701 | 700 | rc_testdata.extract_search_index( |
|
702 | 701 | 'vcs_search_index', os.path.dirname(config['search.location'])) |
|
703 | 702 | |
|
704 | 703 | |
|
705 | 704 | def create_test_directory(test_path): |
|
706 | 705 | """ |
|
707 | 706 | Create test directory if it doesn't exist. |
|
708 | 707 | """ |
|
709 | 708 | if not os.path.isdir(test_path): |
|
710 | 709 | log.debug('Creating testdir %s', test_path) |
|
711 | 710 | os.makedirs(test_path) |
|
712 | 711 | |
|
713 | 712 | |
|
714 | 713 | def create_test_database(test_path, config): |
|
715 | 714 | """ |
|
716 | 715 | Makes a fresh database. |
|
717 | 716 | """ |
|
718 | 717 | from rhodecode.lib.db_manage import DbManage |
|
719 | 718 | |
|
720 | 719 | # PART ONE create db |
|
721 | 720 | dbconf = config['sqlalchemy.db1.url'] |
|
722 | 721 | log.debug('making test db %s', dbconf) |
|
723 | 722 | |
|
724 | 723 | dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'], |
|
725 | 724 | tests=True, cli_args={'force_ask': True}) |
|
726 | 725 | dbmanage.create_tables(override=True) |
|
727 | 726 | dbmanage.set_db_version() |
|
728 | 727 | # for tests dynamically set new root paths based on generated content |
|
729 | 728 | dbmanage.create_settings(dbmanage.config_prompt(test_path)) |
|
730 | 729 | dbmanage.create_default_user() |
|
731 | 730 | dbmanage.create_test_admin_and_users() |
|
732 | 731 | dbmanage.create_permissions() |
|
733 | 732 | dbmanage.populate_default_permissions() |
|
734 | 733 | Session().commit() |
|
735 | 734 | |
|
736 | 735 | |
|
737 | 736 | def create_test_repositories(test_path, config): |
|
738 | 737 | """ |
|
739 | 738 | Creates test repositories in the temporary directory. Repositories are |
|
740 | 739 | extracted from archives within the rc_testdata package. |
|
741 | 740 | """ |
|
742 | 741 | import rc_testdata |
|
743 | 742 | from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO |
|
744 | 743 | |
|
745 | 744 | log.debug('making test vcs repositories') |
|
746 | 745 | |
|
747 | 746 | idx_path = config['search.location'] |
|
748 | 747 | data_path = config['cache_dir'] |
|
749 | 748 | |
|
750 | 749 | # clean index and data |
|
751 | 750 | if idx_path and os.path.exists(idx_path): |
|
752 | 751 | log.debug('remove %s', idx_path) |
|
753 | 752 | shutil.rmtree(idx_path) |
|
754 | 753 | |
|
755 | 754 | if data_path and os.path.exists(data_path): |
|
756 | 755 | log.debug('remove %s', data_path) |
|
757 | 756 | shutil.rmtree(data_path) |
|
758 | 757 | |
|
759 | 758 | rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO)) |
|
760 | 759 | rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO)) |
|
761 | 760 | |
|
762 | 761 | # Note: Subversion is in the process of being integrated with the system.

763 | 762 | # Until we have a properly packaged version of the test svn repository,

764 | 763 | # this copies over the repo from the "rc_testdata" package.
|
765 | 764 | svn_repo_path = rc_testdata.get_svn_repo_archive() |
|
766 | 765 | with tarfile.open(svn_repo_path) as tar: |
|
767 | 766 | tar.extractall(jn(test_path, SVN_REPO)) |
|
768 | 767 | |
|
769 | 768 | |
|
770 | #============================================================================== | |
|
771 | # PASTER COMMANDS | |
|
772 | #============================================================================== | |
|
773 | class BasePasterCommand(Command): | |
|
774 | """ | |
|
775 | Abstract Base Class for paster commands. | |
|
776 | ||
|
777 | The celery commands are somewhat aggressive about loading | |
|
778 | celery.conf, and since our module sets the `CELERY_LOADER` | |
|
779 | environment variable to our loader, we have to bootstrap a bit and | |
|
780 | make sure we've had a chance to load the pylons config from the
|
781 | command line, otherwise everything fails. | |
|
782 | """ | |
|
783 | min_args = 1 | |
|
784 | min_args_error = "Please provide a paster config file as an argument." | |
|
785 | takes_config_file = 1 | |
|
786 | requires_config_file = True | |
|
787 | ||
|
788 | def notify_msg(self, msg, log=False): | |
|
789 | """Make a notification to user, additionally if logger is passed | |
|
790 | it logs this action using given logger | |
|
791 | ||
|
792 | :param msg: message that will be printed to user | |
|
793 | :param log: logging instance, to use to additionally log this message | |
|
794 | ||
|
795 | """ | |
|
796 | if log and isinstance(log, logging.Logger):

797 | log.info(msg)
|
798 | ||
|
799 | def run(self, args): | |
|
800 | """ | |
|
801 | Overrides Command.run | |
|
802 | ||
|
803 | Checks for a config file argument and loads it. | |
|
804 | """ | |
|
805 | if len(args) < self.min_args: | |
|
806 | raise BadCommand( | |
|
807 | self.min_args_error % {'min_args': self.min_args, | |
|
808 | 'actual_args': len(args)}) | |
|
809 | ||
|
810 | # Decrement because we're going to lop off the first argument.
|
811 | # @@ This is hacky | |
|
812 | self.min_args -= 1 | |
|
813 | self.bootstrap_config(args[0]) | |
|
814 | self.update_parser() | |
|
815 | return super(BasePasterCommand, self).run(args[1:]) | |
|
816 | ||
|
817 | def update_parser(self): | |
|
818 | """ | |
|
819 | Abstract method. Allows for the class' parser to be updated | |
|
820 | before the superclass' `run` method is called. Necessary to | |
|
821 | allow options/arguments to be passed through to the underlying | |
|
822 | celery command. | |
|
823 | """ | |
|
824 | raise NotImplementedError("Abstract Method.") | |
|
825 | ||
|
826 | def bootstrap_config(self, conf): | |
|
827 | """ | |
|
828 | Loads the pylons configuration. | |
|
829 | """ | |
|
830 | from pylons import config as pylonsconfig | |
|
831 | ||
|
832 | self.path_to_ini_file = os.path.realpath(conf) | |
|
833 | conf = paste.deploy.appconfig('config:' + self.path_to_ini_file) | |
|
834 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) | |
|
835 | ||
|
836 | def _init_session(self): | |
|
837 | """ | |
|
838 | Inits SqlAlchemy Session | |
|
839 | """ | |
|
840 | logging.config.fileConfig(self.path_to_ini_file) | |
|
841 | from pylons import config | |
|
842 | from rhodecode.config.utils import initialize_database | |
|
843 | ||
|
844 | # get to remove repos !! | |
|
845 | add_cache(config) | |
|
846 | initialize_database(config) | |
|
847 | ||
|
848 | ||
|
849 | 769 | def password_changed(auth_user, session): |
|
850 | 770 | # Never report password change in case of default user or anonymous user. |
|
851 | 771 | if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None: |
|
852 | 772 | return False |
|
853 | 773 | |
|
854 | 774 | password_hash = md5(auth_user.password) if auth_user.password else None |
|
855 | 775 | rhodecode_user = session.get('rhodecode_user', {}) |
|
856 | 776 | session_password_hash = rhodecode_user.get('password', '') |
|
857 | 777 | return password_hash != session_password_hash |
|
858 | 778 | |
|
859 | 779 | |
|
860 | 780 | def read_opensource_licenses(): |
|
861 | 781 | global _license_cache |
|
862 | 782 | |
|
863 | 783 | if not _license_cache: |
|
864 | 784 | licenses = pkg_resources.resource_string( |
|
865 | 785 | 'rhodecode', 'config/licenses.json') |
|
866 | 786 | _license_cache = json.loads(licenses) |
|
867 | 787 | |
|
868 | 788 | return _license_cache |
|
869 | 789 | |
|
870 | 790 | |
|
871 | 791 | def generate_platform_uuid(): |
|
872 | 792 | """ |
|
873 | 793 | Generates a platform UUID based on its name
|
874 | 794 | """ |
|
875 | 795 | import platform |
|
876 | 796 | |
|
877 | 797 | try: |
|
878 | 798 | uuid_list = [platform.platform()] |
|
879 | 799 | return hashlib.sha256(':'.join(uuid_list)).hexdigest() |
|
880 | 800 | except Exception as e: |
|
881 | 801 | log.error('Failed to generate host uuid: %s', e)
|
882 | 802 | return 'UNDEFINED' |
@@ -1,1007 +1,980 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | Some simple helper functions |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | ||
|
27 | 26 | import collections |
|
28 | 27 | import datetime |
|
29 | 28 | import dateutil.relativedelta |
|
30 | 29 | import hashlib |
|
31 | 30 | import logging |
|
32 | 31 | import re |
|
33 | 32 | import sys |
|
34 | 33 | import time |
|
35 | 34 | import urllib |
|
36 | 35 | import urlobject |
|
37 | 36 | import uuid |
|
38 | 37 | |
|
39 | 38 | import pygments.lexers |
|
40 | 39 | import sqlalchemy |
|
41 | 40 | import sqlalchemy.engine.url |
|
42 | 41 | import sqlalchemy.exc |
|
43 | 42 | import sqlalchemy.sql |
|
44 | 43 | import webob |
|
45 | import routes.util | |
|
46 | 44 | import pyramid.threadlocal |
|
47 | 45 | |
|
48 | 46 | import rhodecode |
|
49 | 47 | from rhodecode.translation import _, _pluralize |
|
50 | 48 | |
|
51 | 49 | |
|
52 | 50 | def md5(s): |
|
53 | 51 | return hashlib.md5(s).hexdigest() |
|
54 | 52 | |
|
55 | 53 | |
|
56 | 54 | def md5_safe(s): |
|
57 | 55 | return md5(safe_str(s)) |
|
58 | 56 | |
|
59 | 57 | |
|
60 | 58 | def __get_lem(extra_mapping=None): |
|
61 | 59 | """ |
|
62 | 60 | Get language extension map based on what's inside pygments lexers |
|
63 | 61 | """ |
|
64 | 62 | d = collections.defaultdict(lambda: []) |
|
65 | 63 | |
|
66 | 64 | def __clean(s): |
|
67 | 65 | s = s.lstrip('*') |
|
68 | 66 | s = s.lstrip('.') |
|
69 | 67 | |
|
70 | 68 | if s.find('[') != -1: |
|
71 | 69 | exts = [] |
|
72 | 70 | start, stop = s.find('['), s.find(']') |
|
73 | 71 | |
|
74 | 72 | for suffix in s[start + 1:stop]: |
|
75 | 73 | exts.append(s[:s.find('[')] + suffix) |
|
76 | 74 | return [e.lower() for e in exts] |
|
77 | 75 | else: |
|
78 | 76 | return [s.lower()] |
|
79 | 77 | |
|
80 | 78 | for lx, t in sorted(pygments.lexers.LEXERS.items()): |
|
81 | 79 | m = map(__clean, t[-2]) |
|
82 | 80 | if m: |
|
83 | 81 | m = reduce(lambda x, y: x + y, m) |
|
84 | 82 | for ext in m: |
|
85 | 83 | desc = lx.replace('Lexer', '') |
|
86 | 84 | d[ext].append(desc) |
|
87 | 85 | |
|
88 | 86 | data = dict(d) |
|
89 | 87 | |
|
90 | 88 | extra_mapping = extra_mapping or {} |
|
91 | 89 | if extra_mapping: |
|
92 | 90 | for k, v in extra_mapping.items(): |
|
93 | 91 | if k not in data: |
|
94 | 92 | # register new mapping2lexer |
|
95 | 93 | data[k] = [v] |
|
96 | 94 | |
|
97 | 95 | return data |
|
98 | 96 | |
|
99 | 97 | |
|
100 | 98 | def str2bool(_str): |
|
101 | 99 | """ |
|
102 | 100 | returns a True/False value from the given string; it tries to translate

103 | 101 | the string into a boolean
|
104 | 102 | |
|
105 | 103 | :param _str: string value to translate into boolean |
|
106 | 104 | :rtype: boolean |
|
107 | 105 | :returns: boolean from given string |
|
108 | 106 | """ |
|
109 | 107 | if _str is None: |
|
110 | 108 | return False |
|
111 | 109 | if _str in (True, False): |
|
112 | 110 | return _str |
|
113 | 111 | _str = str(_str).strip().lower() |
|
114 | 112 | return _str in ('t', 'true', 'y', 'yes', 'on', '1') |
|
115 | 113 | |
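
# Behaviour sketch: truthy strings are matched case-insensitively
# against the whitelist above; anything else, including None and '',
# yields False.
#
#   >>> str2bool('Yes'), str2bool('on'), str2bool('1')
#   (True, True, True)
#   >>> str2bool('nope'), str2bool(''), str2bool(None)
#   (False, False, False)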
|
116 | 114 | |
|
117 | 115 | def aslist(obj, sep=None, strip=True): |
|
118 | 116 | """ |
|
119 | 117 | Returns the given string split by sep as a list
|
120 | 118 | |
|
121 | 119 | :param obj: |
|
122 | 120 | :param sep: |
|
123 | 121 | :param strip: |
|
124 | 122 | """ |
|
125 | 123 | if isinstance(obj, (basestring,)): |
|
126 | 124 | lst = obj.split(sep) |
|
127 | 125 | if strip: |
|
128 | 126 | lst = [v.strip() for v in lst] |
|
129 | 127 | return lst |
|
130 | 128 | elif isinstance(obj, (list, tuple)): |
|
131 | 129 | return obj |
|
132 | 130 | elif obj is None: |
|
133 | 131 | return [] |
|
134 | 132 | else: |
|
135 | 133 | return [obj] |
|
136 | 134 | |
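
# Behaviour sketch covering all branches: strings are split and
# stripped, sequences pass through, None becomes [] and other scalars
# are wrapped in a list.
#
#   >>> aslist('hg, git , svn', sep=',')
#   ['hg', 'git', 'svn']
#   >>> aslist(['a', 'b']), aslist(None), aslist(42)
#   (['a', 'b'], [], [42])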
|
137 | 135 | |
|
138 | 136 | def convert_line_endings(line, mode): |
|
139 | 137 | """ |
|
140 | 138 | Converts the line endings of a given line according to the given mode
|
141 | 139 | |
|
142 | 140 | Available modes are:: |
|
143 | 141 | 0 - Unix |
|
144 | 142 | 1 - Mac |
|
145 | 143 | 2 - DOS |
|
146 | 144 | |
|
147 | 145 | :param line: given line to convert |
|
148 | 146 | :param mode: mode to convert to |
|
149 | 147 | :rtype: str |
|
150 | 148 | :return: converted line according to mode |
|
151 | 149 | """ |
|
152 | 150 | if mode == 0: |
|
153 | 151 | line = line.replace('\r\n', '\n') |
|
154 | 152 | line = line.replace('\r', '\n') |
|
155 | 153 | elif mode == 1: |
|
156 | 154 | line = line.replace('\r\n', '\r') |
|
157 | 155 | line = line.replace('\n', '\r') |
|
158 | 156 | elif mode == 2: |
|
159 | 157 | line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line) |
|
160 | 158 | return line |
|
161 | 159 | |
|
162 | 160 | |
|
163 | 161 | def detect_mode(line, default): |
|
164 | 162 | """ |
|
165 | 163 | Detects the line break for the given line; if the line break couldn't be

166 | 164 | found, the given default value is returned
|
167 | 165 | |
|
168 | 166 | :param line: str line |
|
169 | 167 | :param default: default |
|
170 | 168 | :rtype: int |
|
171 | 169 | :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
|
172 | 170 | """ |
|
173 | 171 | if line.endswith('\r\n'): |
|
174 | 172 | return 2 |
|
175 | 173 | elif line.endswith('\n'): |
|
176 | 174 | return 0 |
|
177 | 175 | elif line.endswith('\r'): |
|
178 | 176 | return 1 |
|
179 | 177 | else: |
|
180 | 178 | return default |
|
181 | 179 | |
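
# Round-trip sketch for the two helpers above: detect the mode of one
# line, then normalise another line to it.
#
#   >>> detect_mode('foo\r\n', default=0)   # DOS line ending
#   2
#   >>> convert_line_endings('a\r\nb\r', 0)  # everything to Unix '\n'
#   'a\nb\n'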
|
182 | 180 | |
|
183 | 181 | def safe_int(val, default=None): |
|
184 | 182 | """ |
|
185 | 183 | Returns int() of val; if val is not convertible to int, the default is

186 | 184 | returned instead
|
187 | 185 | |
|
188 | 186 | :param val: |
|
189 | 187 | :param default: |
|
190 | 188 | """ |
|
191 | 189 | |
|
192 | 190 | try: |
|
193 | 191 | val = int(val) |
|
194 | 192 | except (ValueError, TypeError): |
|
195 | 193 | val = default |
|
196 | 194 | |
|
197 | 195 | return val |
|
198 | 196 | |
|
199 | 197 | |
|
200 | 198 | def safe_unicode(str_, from_encoding=None): |
|
201 | 199 | """ |
|
202 | 200 | safe unicode function. Does a few tricks to turn str_ into unicode
|
203 | 201 | |
|
204 | 202 | In case of a UnicodeDecodeError we try to decode with the encoding

205 | 203 | detected by the chardet library; if that fails, fall back to unicode with errors replaced
|
206 | 204 | |
|
207 | 205 | :param str_: string to decode |
|
208 | 206 | :rtype: unicode |
|
209 | 207 | :returns: unicode object |
|
210 | 208 | """ |
|
211 | 209 | if isinstance(str_, unicode): |
|
212 | 210 | return str_ |
|
213 | 211 | |
|
214 | 212 | if not from_encoding: |
|
215 | 213 | DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding', |
|
216 | 214 | 'utf8'), sep=',') |
|
217 | 215 | from_encoding = DEFAULT_ENCODINGS |
|
218 | 216 | |
|
219 | 217 | if not isinstance(from_encoding, (list, tuple)): |
|
220 | 218 | from_encoding = [from_encoding] |
|
221 | 219 | |
|
222 | 220 | try: |
|
223 | 221 | return unicode(str_) |
|
224 | 222 | except UnicodeDecodeError: |
|
225 | 223 | pass |
|
226 | 224 | |
|
227 | 225 | for enc in from_encoding: |
|
228 | 226 | try: |
|
229 | 227 | return unicode(str_, enc) |
|
230 | 228 | except UnicodeDecodeError: |
|
231 | 229 | pass |
|
232 | 230 | |
|
233 | 231 | try: |
|
234 | 232 | import chardet |
|
235 | 233 | encoding = chardet.detect(str_)['encoding'] |
|
236 | 234 | if encoding is None: |
|
237 | 235 | raise Exception() |
|
238 | 236 | return str_.decode(encoding) |
|
239 | 237 | except (ImportError, UnicodeDecodeError, Exception): |
|
240 | 238 | return unicode(str_, from_encoding[0], 'replace') |
|
241 | 239 | |
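
# Decoding-chain sketch: plain unicode() first, then each configured
# encoding, then chardet detection, finally a lossy 'replace' decode.
# With the default utf8 configuration:
#
#   >>> safe_unicode('\xc5\xbc')   # the utf-8 bytes of u'\u017c'
#   u'\u017c'
#   >>> safe_unicode(u'already unicode')
#   u'already unicode'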
|
242 | 240 | |
|
243 | 241 | def safe_str(unicode_, to_encoding=None): |
|
244 | 242 | """ |
|
245 | 243 | safe str function. Does a few tricks to turn unicode_ into a string
|
246 | 244 | |
|
247 | 245 | In case of a UnicodeEncodeError we try to encode with the encoding

248 | 246 | detected by the chardet library; if that fails, fall back to str with errors replaced
|
249 | 247 | |
|
250 | 248 | :param unicode_: unicode to encode |
|
251 | 249 | :rtype: str |
|
252 | 250 | :returns: str object |
|
253 | 251 | """ |
|
254 | 252 | |
|
255 | 253 | # if it's not a basestring, cast to str
|
256 | 254 | if not isinstance(unicode_, basestring): |
|
257 | 255 | return str(unicode_) |
|
258 | 256 | |
|
259 | 257 | if isinstance(unicode_, str): |
|
260 | 258 | return unicode_ |
|
261 | 259 | |
|
262 | 260 | if not to_encoding: |
|
263 | 261 | DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding', |
|
264 | 262 | 'utf8'), sep=',') |
|
265 | 263 | to_encoding = DEFAULT_ENCODINGS |
|
266 | 264 | |
|
267 | 265 | if not isinstance(to_encoding, (list, tuple)): |
|
268 | 266 | to_encoding = [to_encoding] |
|
269 | 267 | |
|
270 | 268 | for enc in to_encoding: |
|
271 | 269 | try: |
|
272 | 270 | return unicode_.encode(enc) |
|
273 | 271 | except UnicodeEncodeError: |
|
274 | 272 | pass |
|
275 | 273 | |
|
276 | 274 | try: |
|
277 | 275 | import chardet |
|
278 | 276 | encoding = chardet.detect(unicode_)['encoding'] |
|
279 | 277 | if encoding is None: |
|
280 | 278 | raise Exception()  # UnicodeEncodeError() without args is a TypeError
|
281 | 279 | |
|
282 | 280 | return unicode_.encode(encoding) |
|
283 | 281 | except (ImportError, UnicodeEncodeError, Exception):
|
284 | 282 | return unicode_.encode(to_encoding[0], 'replace') |
|
285 | 283 | |
|
286 | 284 | |
|
287 | 285 | def remove_suffix(s, suffix): |
|
288 | 286 | if s.endswith(suffix): |
|
289 | 287 | s = s[:-1 * len(suffix)] |
|
290 | 288 | return s |
|
291 | 289 | |
|
292 | 290 | |
|
293 | 291 | def remove_prefix(s, prefix): |
|
294 | 292 | if s.startswith(prefix): |
|
295 | 293 | s = s[len(prefix):] |
|
296 | 294 | return s |
|
297 | 295 | |
|
298 | 296 | |
|
299 | 297 | def find_calling_context(ignore_modules=None): |
|
300 | 298 | """ |
|
301 | 299 | Look through the calling stack and return the frame which called |
|
302 | 300 | this function and is part of the core module (i.e. rhodecode.*)
|
303 | 301 | |
|
304 | 302 | :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib'] |
|
305 | 303 | """ |
|
306 | 304 | |
|
307 | 305 | ignore_modules = ignore_modules or [] |
|
308 | 306 | |
|
309 | 307 | f = sys._getframe(2) |
|
310 | 308 | while f.f_back is not None: |
|
311 | 309 | name = f.f_globals.get('__name__') |
|
312 | 310 | if name and name.startswith(__name__.split('.')[0]): |
|
313 | 311 | if name not in ignore_modules: |
|
314 | 312 | return f |
|
315 | 313 | f = f.f_back |
|
316 | 314 | return None |
|
317 | 315 | |
|
318 | 316 | |
|
319 | 317 | def ping_connection(connection, branch): |
|
320 | 318 | if branch: |
|
321 | 319 | # "branch" refers to a sub-connection of a connection, |
|
322 | 320 | # we don't want to bother pinging on these. |
|
323 | 321 | return |
|
324 | 322 | |
|
325 | 323 | # turn off "close with result". This flag is only used with |
|
326 | 324 | # "connectionless" execution, otherwise will be False in any case |
|
327 | 325 | save_should_close_with_result = connection.should_close_with_result |
|
328 | 326 | connection.should_close_with_result = False |
|
329 | 327 | |
|
330 | 328 | try: |
|
331 | 329 | # run a SELECT 1. use a core select() so that |
|
332 | 330 | # the SELECT of a scalar value without a table is |
|
333 | 331 | # appropriately formatted for the backend |
|
334 | 332 | connection.scalar(sqlalchemy.sql.select([1])) |
|
335 | 333 | except sqlalchemy.exc.DBAPIError as err: |
|
336 | 334 | # catch SQLAlchemy's DBAPIError, which is a wrapper |
|
337 | 335 | # for the DBAPI's exception. It includes a .connection_invalidated |
|
338 | 336 | # attribute which specifies if this connection is a "disconnect" |
|
339 | 337 | # condition, which is based on inspection of the original exception |
|
340 | 338 | # by the dialect in use. |
|
341 | 339 | if err.connection_invalidated: |
|
342 | 340 | # run the same SELECT again - the connection will re-validate |
|
343 | 341 | # itself and establish a new connection. The disconnect detection |
|
344 | 342 | # here also causes the whole connection pool to be invalidated |
|
345 | 343 | # so that all stale connections are discarded. |
|
346 | 344 | connection.scalar(sqlalchemy.sql.select([1])) |
|
347 | 345 | else: |
|
348 | 346 | raise |
|
349 | 347 | finally: |
|
350 | 348 | # restore "close with result" |
|
351 | 349 | connection.should_close_with_result = save_should_close_with_result |
|
352 | 350 | |
|
353 | 351 | |
|
354 | 352 | def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): |
|
355 | 353 | """Custom engine_from_config functions.""" |
|
356 | 354 | log = logging.getLogger('sqlalchemy.engine') |
|
357 | 355 | engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs) |
|
358 | 356 | |
|
359 | 357 | def color_sql(sql): |
|
360 | 358 | color_seq = '\033[1;33m' # This is yellow: code 33 |
|
361 | 359 | normal = '\x1b[0m' |
|
362 | 360 | return ''.join([color_seq, sql, normal]) |
|
363 | 361 | |
|
364 | 362 | if configuration['debug']: |
|
365 | 363 | # attach events only for debug configuration |
|
366 | 364 | |
|
367 | 365 | def before_cursor_execute(conn, cursor, statement, |
|
368 | 366 | parameters, context, executemany): |
|
369 | 367 | setattr(conn, 'query_start_time', time.time()) |
|
370 | 368 | log.info(color_sql(">>>>> STARTING QUERY >>>>>")) |
|
371 | 369 | calling_context = find_calling_context(ignore_modules=[ |
|
372 | 370 | 'rhodecode.lib.caching_query', |
|
373 | 371 | 'rhodecode.model.settings', |
|
374 | 372 | ]) |
|
375 | 373 | if calling_context: |
|
376 | 374 | log.info(color_sql('call context %s:%s' % ( |
|
377 | 375 | calling_context.f_code.co_filename, |
|
378 | 376 | calling_context.f_lineno, |
|
379 | 377 | ))) |
|
380 | 378 | |
|
381 | 379 | def after_cursor_execute(conn, cursor, statement, |
|
382 | 380 | parameters, context, executemany): |
|
383 | 381 | delattr(conn, 'query_start_time') |
|
384 | 382 | |
|
385 | 383 | sqlalchemy.event.listen(engine, "engine_connect", |
|
386 | 384 | ping_connection) |
|
387 | 385 | sqlalchemy.event.listen(engine, "before_cursor_execute", |
|
388 | 386 | before_cursor_execute) |
|
389 | 387 | sqlalchemy.event.listen(engine, "after_cursor_execute", |
|
390 | 388 | after_cursor_execute) |
|
391 | 389 | |
|
392 | 390 | return engine |
|
393 | 391 | |
|
394 | 392 | |
|
395 | 393 | def get_encryption_key(config): |
|
396 | 394 | secret = config.get('rhodecode.encrypted_values.secret') |
|
397 | 395 | default = config['beaker.session.secret'] |
|
398 | 396 | return secret or default |
|
399 | 397 | |
|
400 | 398 | |
|
401 | 399 | def age(prevdate, now=None, show_short_version=False, show_suffix=True, |
|
402 | 400 | short_format=False): |
|
403 | 401 | """ |
|
404 | 402 | Turns a datetime into an age string. |
|
405 | 403 | If show_short_version is True, this generates a shorter string with |
|
406 | 404 | an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'. |
|
407 | 405 | |
|
408 | 406 | *IMPORTANT*

409 | 407 | The code of this function is written in a special way so that it is easy

410 | 408 | to backport to javascript. If you mean to update it, please also update

411 | 409 | the `jquery.timeago-extension.js` file
|
412 | 410 | |
|
413 | 411 | :param prevdate: datetime object |
|
414 | 412 | :param now: current time; if not defined we use
|
415 | 413 | `datetime.datetime.now()` |
|
416 | 414 | :param show_short_version: if it should approximate the date and |
|
417 | 415 | return a shorter string |
|
418 | 416 | :param show_suffix: |
|
419 | 417 | :param short_format: show short format, eg 2D instead of 2 days |
|
420 | 418 | :rtype: unicode |
|
421 | 419 | :returns: unicode words describing age |
|
422 | 420 | """ |
|
423 | 421 | |
|
424 | 422 | def _get_relative_delta(now, prevdate): |
|
425 | 423 | base = dateutil.relativedelta.relativedelta(now, prevdate) |
|
426 | 424 | return { |
|
427 | 425 | 'year': base.years, |
|
428 | 426 | 'month': base.months, |
|
429 | 427 | 'day': base.days, |
|
430 | 428 | 'hour': base.hours, |
|
431 | 429 | 'minute': base.minutes, |
|
432 | 430 | 'second': base.seconds, |
|
433 | 431 | } |
|
434 | 432 | |
|
435 | 433 | def _is_leap_year(year): |
|
436 | 434 | return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) |
|
437 | 435 | |
|
438 | 436 | def get_month(prevdate): |
|
439 | 437 | return prevdate.month |
|
440 | 438 | |
|
441 | 439 | def get_year(prevdate): |
|
442 | 440 | return prevdate.year |
|
443 | 441 | |
|
444 | 442 | now = now or datetime.datetime.now() |
|
445 | 443 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] |
|
446 | 444 | deltas = {} |
|
447 | 445 | future = False |
|
448 | 446 | |
|
449 | 447 | if prevdate > now: |
|
450 | 448 | now_old = now |
|
451 | 449 | now = prevdate |
|
452 | 450 | prevdate = now_old |
|
453 | 451 | future = True |
|
454 | 452 | if future: |
|
455 | 453 | prevdate = prevdate.replace(microsecond=0) |
|
456 | 454 | # Get date parts deltas |
|
457 | 455 | for part in order: |
|
458 | 456 | rel_delta = _get_relative_delta(now, prevdate) |
|
459 | 457 | deltas[part] = rel_delta[part] |
|
460 | 458 | |
|
461 | 459 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, |
|
462 | 460 | # not 1 hour, -59 minutes and -59 seconds) |
|
463 | 461 | offsets = [[5, 60], [4, 60], [3, 24]] |
|
464 | 462 | for element in offsets: # seconds, minutes, hours |
|
465 | 463 | num = element[0] |
|
466 | 464 | length = element[1] |
|
467 | 465 | |
|
468 | 466 | part = order[num] |
|
469 | 467 | carry_part = order[num - 1] |
|
470 | 468 | |
|
471 | 469 | if deltas[part] < 0: |
|
472 | 470 | deltas[part] += length |
|
473 | 471 | deltas[carry_part] -= 1 |
|
474 | 472 | |
|
475 | 473 | # Same thing for days except that the increment depends on the (variable) |
|
476 | 474 | # number of days in the month |
|
477 | 475 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] |
|
478 | 476 | if deltas['day'] < 0: |
|
479 | 477 | if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)): |
|
480 | 478 | deltas['day'] += 29 |
|
481 | 479 | else: |
|
482 | 480 | deltas['day'] += month_lengths[get_month(prevdate) - 1] |
|
483 | 481 | |
|
484 | 482 | deltas['month'] -= 1 |
|
485 | 483 | |
|
486 | 484 | if deltas['month'] < 0: |
|
487 | 485 | deltas['month'] += 12 |
|
488 | 486 | deltas['year'] -= 1 |
|
489 | 487 | |
|
490 | 488 | # Format the result |
|
491 | 489 | if short_format: |
|
492 | 490 | fmt_funcs = { |
|
493 | 491 | 'year': lambda d: u'%dy' % d, |
|
494 | 492 | 'month': lambda d: u'%dm' % d, |
|
495 | 493 | 'day': lambda d: u'%dd' % d, |
|
496 | 494 | 'hour': lambda d: u'%dh' % d, |
|
497 | 495 | 'minute': lambda d: u'%dmin' % d, |
|
498 | 496 | 'second': lambda d: u'%dsec' % d, |
|
499 | 497 | } |
|
500 | 498 | else: |
|
501 | 499 | fmt_funcs = { |
|
502 | 500 | 'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(), |
|
503 | 501 | 'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(), |
|
504 | 502 | 'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(), |
|
505 | 503 | 'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(), |
|
506 | 504 | 'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(), |
|
507 | 505 | 'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(), |
|
508 | 506 | } |
|
509 | 507 | |
|
510 | 508 | i = 0 |
|
511 | 509 | for part in order: |
|
512 | 510 | value = deltas[part] |
|
513 | 511 | if value != 0: |
|
514 | 512 | |
|
515 | 513 | if i < 5: |
|
516 | 514 | sub_part = order[i + 1] |
|
517 | 515 | sub_value = deltas[sub_part] |
|
518 | 516 | else: |
|
519 | 517 | sub_value = 0 |
|
520 | 518 | |
|
521 | 519 | if sub_value == 0 or show_short_version: |
|
522 | 520 | _val = fmt_funcs[part](value) |
|
523 | 521 | if future: |
|
524 | 522 | if show_suffix: |
|
525 | 523 | return _(u'in ${ago}', mapping={'ago': _val}) |
|
526 | 524 | else: |
|
527 | 525 | return _(_val) |
|
528 | 526 | |
|
529 | 527 | else: |
|
530 | 528 | if show_suffix: |
|
531 | 529 | return _(u'${ago} ago', mapping={'ago': _val}) |
|
532 | 530 | else: |
|
533 | 531 | return _(_val) |
|
534 | 532 | |
|
535 | 533 | val = fmt_funcs[part](value) |
|
536 | 534 | val_detail = fmt_funcs[sub_part](sub_value) |
|
537 | 535 | mapping = {'val': val, 'detail': val_detail} |
|
538 | 536 | |
|
539 | 537 | if short_format: |
|
540 | 538 | datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping) |
|
541 | 539 | if show_suffix: |
|
542 | 540 | datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping) |
|
543 | 541 | if future: |
|
544 | 542 | datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping) |
|
545 | 543 | else: |
|
546 | 544 | datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping) |
|
547 | 545 | if show_suffix: |
|
548 | 546 | datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping) |
|
549 | 547 | if future: |
|
550 | 548 | datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping) |
|
551 | 549 | |
|
552 | 550 | return datetime_tmpl |
|
553 | 551 | i += 1 |
|
554 | 552 | return _(u'just now') |
|
555 | 553 | |
|
556 | 554 | |
|
557 | 555 | def cleaned_uri(uri): |
|
558 | 556 | """ |
|
559 | 557 | Quotes '[' and ']' in the uri if there is only one of them;

560 | 558 | according to RFC3986 we cannot use such chars in a uri
|
561 | 559 | :param uri: |
|
562 | 560 | :return: uri without these chars
|
563 | 561 | """ |
|
564 | 562 | return urllib.quote(uri, safe='@$:/') |
|
565 | 563 | |
|
566 | 564 | |
|
567 | 565 | def uri_filter(uri): |
|
568 | 566 | """ |
|
569 | 567 | Removes user:password from given url string |
|
570 | 568 | |
|
571 | 569 | :param uri: |
|
572 | 570 | :rtype: unicode |
|
573 | 571 | :returns: filtered list of strings |
|
574 | 572 | """ |
|
575 | 573 | if not uri: |
|
576 | 574 | return '' |
|
577 | 575 | |
|
578 | 576 | proto = '' |
|
579 | 577 | |
|
580 | 578 | for pat in ('https://', 'http://'): |
|
581 | 579 | if uri.startswith(pat): |
|
582 | 580 | uri = uri[len(pat):] |
|
583 | 581 | proto = pat |
|
584 | 582 | break |
|
585 | 583 | |
|
586 | 584 | # remove passwords and username |
|
587 | 585 | uri = uri[uri.find('@') + 1:] |
|
588 | 586 | |
|
589 | 587 | # get the port |
|
590 | 588 | cred_pos = uri.find(':') |
|
591 | 589 | if cred_pos == -1: |
|
592 | 590 | host, port = uri, None |
|
593 | 591 | else: |
|
594 | 592 | host, port = uri[:cred_pos], uri[cred_pos + 1:] |
|
595 | 593 | |
|
596 | 594 | return filter(None, [proto, host, port]) |
|
597 | 595 | |
|
598 | 596 | |
|
599 | 597 | def credentials_filter(uri): |
|
600 | 598 | """ |
|
601 | 599 | Returns a url with removed credentials |
|
602 | 600 | |
|
603 | 601 | :param uri: |
|
604 | 602 | """ |
|
605 | 603 | |
|
606 | 604 | uri = uri_filter(uri) |
|
607 | 605 | # check if we have port |
|
608 | 606 | if len(uri) > 2 and uri[2]: |
|
609 | 607 | uri[2] = ':' + uri[2] |
|
610 | 608 | |
|
611 | 609 | return ''.join(uri) |
|
612 | 610 | |
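
# Usage sketch: credentials are stripped while scheme, host and port
# are kept, making the url safe to show in logs and UI.
#
#   >>> credentials_filter('http://user:secret@example.com:8080/repo')
#   'http://example.com:8080/repo'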
|
613 | 611 | |
|
614 | 612 | def get_clone_url(request, uri_tmpl, repo_name, repo_id, **override): |
|
615 | 613 | qualifed_home_url = request.route_url('home') |
|
616 | 614 | parsed_url = urlobject.URLObject(qualifed_home_url) |
|
617 | 615 | decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/'))) |
|
618 | 616 | args = { |
|
619 | 617 | 'scheme': parsed_url.scheme, |
|
620 | 618 | 'user': '', |
|
621 | 619 | # path if we use proxy-prefix |
|
622 | 620 | 'netloc': parsed_url.netloc+decoded_path, |
|
623 | 621 | 'prefix': decoded_path, |
|
624 | 622 | 'repo': repo_name, |
|
625 | 623 | 'repoid': str(repo_id) |
|
626 | 624 | } |
|
627 | 625 | args.update(override) |
|
628 | 626 | args['user'] = urllib.quote(safe_str(args['user'])) |
|
629 | 627 | |
|
630 | 628 | for k, v in args.items(): |
|
631 | 629 | uri_tmpl = uri_tmpl.replace('{%s}' % k, v) |
|
632 | 630 | |
|
633 | 631 | # remove leading @ sign if it's present. Case of empty user |
|
634 | 632 | url_obj = urlobject.URLObject(uri_tmpl) |
|
635 | 633 | url = url_obj.with_netloc(url_obj.netloc.lstrip('@')) |
|
636 | 634 | |
|
637 | 635 | return safe_unicode(url) |
|
638 | 636 | |
|
639 | 637 | |
|
640 | 638 | def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None): |
|
641 | 639 | """ |
|
642 | 640 | Safe version of get_commit if this commit doesn't exists for a |
|
643 | 641 | repository it returns a Dummy one instead |
|
644 | 642 | |
|
645 | 643 | :param repo: repository instance |
|
646 | 644 | :param commit_id: commit id as str |
|
647 | 645 | :param pre_load: optional list of commit attributes to load |
|
648 | 646 | """ |
|
649 | 647 | # TODO(skreft): remove these circular imports |
|
650 | 648 | from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit |
|
651 | 649 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
652 | 650 | if not isinstance(repo, BaseRepository): |
|
653 | 651 | raise Exception('You must pass a Repository '

654 | 652 | 'object as first argument, got %s' % type(repo))
|
655 | 653 | |
|
656 | 654 | try: |
|
657 | 655 | commit = repo.get_commit( |
|
658 | 656 | commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load) |
|
659 | 657 | except (RepositoryError, LookupError): |
|
660 | 658 | commit = EmptyCommit() |
|
661 | 659 | return commit |
|
662 | 660 | |
|
663 | 661 | |
|
664 | 662 | def datetime_to_time(dt): |
|
665 | 663 | if dt: |
|
666 | 664 | return time.mktime(dt.timetuple()) |
|
667 | 665 | |
|
668 | 666 | |
|
669 | 667 | def time_to_datetime(tm): |
|
670 | 668 | if tm: |
|
671 | 669 | if isinstance(tm, basestring): |
|
672 | 670 | try: |
|
673 | 671 | tm = float(tm) |
|
674 | 672 | except ValueError: |
|
675 | 673 | return |
|
676 | 674 | return datetime.datetime.fromtimestamp(tm) |
|
677 | 675 | |
|
678 | 676 | |
|
679 | 677 | def time_to_utcdatetime(tm): |
|
680 | 678 | if tm: |
|
681 | 679 | if isinstance(tm, basestring): |
|
682 | 680 | try: |
|
683 | 681 | tm = float(tm) |
|
684 | 682 | except ValueError: |
|
685 | 683 | return |
|
686 | 684 | return datetime.datetime.utcfromtimestamp(tm) |
|
687 | 685 | |
|
688 | 686 | |
|
689 | 687 | MENTIONS_REGEX = re.compile( |
|
690 | 688 | # ^@ or @ without any special chars in front |
|
691 | 689 | r'(?:^@|[^a-zA-Z0-9\-\_\.]@)' |
|
692 | 690 | # main body starts with letter, then can be . - _ |
|
693 | 691 | r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)', |
|
694 | 692 | re.VERBOSE | re.MULTILINE) |
|
695 | 693 | |
|
696 | 694 | |
|
697 | 695 | def extract_mentioned_users(s): |
|
698 | 696 | """ |
|
699 | 697 | Returns unique usernames from given string s that have @mention |
|
700 | 698 | |
|
701 | 699 | :param s: string to get mentions |
|
702 | 700 | """ |
|
703 | 701 | usrs = set() |
|
704 | 702 | for username in MENTIONS_REGEX.findall(s): |
|
705 | 703 | usrs.add(username) |
|
706 | 704 | |
|
707 | 705 | return sorted(list(usrs), key=lambda k: k.lower()) |
|
708 | 706 | |
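
# Example: mentions match at the start of the text or after a
# non-username character, and are de-duplicated and sorted
# case-insensitively.
#
#   >>> extract_mentioned_users('@john please review, cc: @Anna and @john')
#   ['Anna', 'john']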
|
709 | 707 | |
|
710 | 708 | class StrictAttributeDict(dict): |
|
711 | 709 | """ |
|
712 | 710 | Strict version of AttributeDict which raises an AttributeError when a

713 | 711 | requested attribute is not set
|
714 | 712 | """ |
|
715 | 713 | def __getattr__(self, attr): |
|
716 | 714 | try: |
|
717 | 715 | return self[attr] |
|
718 | 716 | except KeyError: |
|
719 | 717 | raise AttributeError('%s object has no attribute %s' % ( |
|
720 | 718 | self.__class__, attr)) |
|
721 | 719 | __setattr__ = dict.__setitem__ |
|
722 | 720 | __delattr__ = dict.__delitem__ |
|
723 | 721 | |
|
724 | 722 | |
|
725 | 723 | class AttributeDict(dict): |
|
726 | 724 | def __getattr__(self, attr): |
|
727 | 725 | return self.get(attr, None) |
|
728 | 726 | __setattr__ = dict.__setitem__ |
|
729 | 727 | __delattr__ = dict.__delitem__ |
|
730 | 728 | |
|
731 | 729 | |
|
732 | 730 | def fix_PATH(os_=None): |
|
733 | 731 | """ |
|
734 | 732 | Get current active python path, and append it to PATH variable to fix |
|
735 | 733 | issues of subprocess calls and different python versions |
|
736 | 734 | """ |
|
737 | 735 | if os_ is None: |
|
738 | 736 | import os |
|
739 | 737 | else: |
|
740 | 738 | os = os_ |
|
741 | 739 | |
|
742 | 740 | cur_path = os.path.split(sys.executable)[0] |
|
743 | 741 | if not os.environ['PATH'].startswith(cur_path): |
|
744 | 742 | os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH']) |
|
745 | 743 | |
|
746 | 744 | |
|
747 | 745 | def obfuscate_url_pw(engine): |
|
748 | 746 | _url = engine or '' |
|
749 | 747 | try: |
|
750 | 748 | _url = sqlalchemy.engine.url.make_url(engine) |
|
751 | 749 | if _url.password: |
|
752 | 750 | _url.password = 'XXXXX' |
|
753 | 751 | except Exception: |
|
754 | 752 | pass |
|
755 | 753 | return unicode(_url) |
|
756 | 754 | |
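
# Usage sketch: the password component of an SQLAlchemy url is masked so
# connection strings can be logged; unparsable input is returned as-is.
#
#   >>> obfuscate_url_pw('postgresql://rc:secret@localhost/rhodecode')
#   u'postgresql://rc:XXXXX@localhost/rhodecode'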
|
757 | 755 | |
|
758 | 756 | def get_server_url(environ): |
|
759 | 757 | req = webob.Request(environ) |
|
760 | 758 | return req.host_url + req.script_name |
|
761 | 759 | |
|
762 | 760 | |
|
763 | 761 | def unique_id(hexlen=32): |
|
764 | 762 | alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz" |
|
765 | 763 | return suuid(truncate_to=hexlen, alphabet=alphabet) |
|
766 | 764 | |
|
767 | 765 | |
|
768 | 766 | def suuid(url=None, truncate_to=22, alphabet=None): |
|
769 | 767 | """ |
|
770 | 768 | Generate and return a short URL safe UUID. |
|
771 | 769 | |
|
772 | 770 | If the url parameter is provided, set the namespace to the provided |
|
773 | 771 | URL and generate a UUID. |
|
774 | 772 | |
|
775 | 773 | :param url: url to get the uuid for

776 | 774 | :param truncate_to: truncate the basic 22-char UUID to a shorter version
|
777 | 775 | |
|
778 | 776 | The IDs won't be universally unique any longer, but the probability of |
|
779 | 777 | a collision will still be very low. |
|
780 | 778 | """ |
|
781 | 779 | # Define our alphabet. |
|
782 | 780 | _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ" |
|
783 | 781 | |
|
784 | 782 | # If no URL is given, generate a random UUID. |
|
785 | 783 | if url is None: |
|
786 | 784 | unique_id = uuid.uuid4().int |
|
787 | 785 | else: |
|
788 | 786 | unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int |
|
789 | 787 | |
|
790 | 788 | alphabet_length = len(_ALPHABET) |
|
791 | 789 | output = [] |
|
792 | 790 | while unique_id > 0: |
|
793 | 791 | digit = unique_id % alphabet_length |
|
794 | 792 | output.append(_ALPHABET[digit]) |
|
795 | 793 | unique_id = int(unique_id / alphabet_length) |
|
796 | 794 | return "".join(output)[:truncate_to] |
|
797 | 795 | |
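Usage sketch: unique_id() yields a random short ID (roughly 22 characters, since a 128-bit UUID is re-encoded in base 56), while passing a URL to suuid() is deterministic because uuid3 hashes against NAMESPACE_URL (values shown are illustrative)::

    print unique_id()                                  # e.g. 'M3kZhV7p...'
    a = suuid(url='https://rhodecode.com', truncate_to=8)
    b = suuid(url='https://rhodecode.com', truncate_to=8)
    assert a == b                                      # same URL -> same short ID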
|
798 | 796 | |
|
799 | 797 | def get_current_rhodecode_user(request=None): |
|
800 | 798 | """ |
|
801 | 799 | Gets the rhodecode user from the request
|
802 | 800 | """ |
|
803 | 801 | pyramid_request = request or pyramid.threadlocal.get_current_request() |
|
804 | 802 | |
|
805 | 803 | # web case |
|
806 | 804 | if pyramid_request and hasattr(pyramid_request, 'user'): |
|
807 | 805 | return pyramid_request.user |
|
808 | 806 | |
|
809 | 807 | # api case |
|
810 | 808 | if pyramid_request and hasattr(pyramid_request, 'rpc_user'): |
|
811 | 809 | return pyramid_request.rpc_user |
|
812 | 810 | |
|
813 | 811 | return None |
|
814 | 812 | |
|
815 | 813 | |
|
816 | 814 | def action_logger_generic(action, namespace=''): |
|
817 | 815 | """ |
|
818 | 816 | A generic logger for actions useful to the system overview. It tries to find

819 | 817 | an acting user for the context of the call, otherwise it reports an unknown user
|
820 | 818 | |
|
821 | 819 | :param action: logging message, e.g. 'comment 5 deleted'

822 | 820 | :type action: string

823 | 821 | 

824 | 822 | :param namespace: namespace of the logging message, e.g. 'repo.comments'

825 | 823 | :type namespace: string
|
826 | 824 | |
|
827 | 825 | """ |
|
828 | 826 | |
|
829 | 827 | logger_name = 'rhodecode.actions' |
|
830 | 828 | |
|
831 | 829 | if namespace: |
|
832 | 830 | logger_name += '.' + namespace |
|
833 | 831 | |
|
834 | 832 | log = logging.getLogger(logger_name) |
|
835 | 833 | |
|
836 | 834 | # get a user if we can |
|
837 | 835 | user = get_current_rhodecode_user() |
|
838 | 836 | |
|
839 | 837 | logfunc = log.info |
|
840 | 838 | |
|
841 | 839 | if not user: |
|
842 | 840 | user = '<unknown user>' |
|
843 | 841 | logfunc = log.warning |
|
844 | 842 | |
|
845 | 843 | logfunc('Logging action by {}: {}'.format(user, action)) |
|
846 | 844 | |
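Usage sketch matching the docstring above::

    action_logger_generic('comment 5 deleted', namespace='repo.comments')
    # logs through 'rhodecode.actions.repo.comments'; falls back to a WARNING
    # with '<unknown user>' when no acting user can be found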
|
847 | 845 | |
|
848 | 846 | def escape_split(text, sep=',', maxsplit=-1): |
|
849 | 847 | r""" |
|
850 | 848 | Allows for escaping of the separator: e.g. arg='foo\, bar' |
|
851 | 849 | |
|
852 | 850 | Note that, given the way bash et al. parse the command line, those

853 | 851 | single quotes are required.
|
854 | 852 | """ |
|
855 | 853 | escaped_sep = r'\%s' % sep |
|
856 | 854 | |
|
857 | 855 | if escaped_sep not in text: |
|
858 | 856 | return text.split(sep, maxsplit) |
|
859 | 857 | |
|
860 | 858 | before, _mid, after = text.partition(escaped_sep) |
|
861 | 859 | startlist = before.split(sep, maxsplit) # a regular split is fine here |
|
862 | 860 | unfinished = startlist[-1] |
|
863 | 861 | startlist = startlist[:-1] |
|
864 | 862 | |
|
865 | 863 | # recurse because there may be more escaped separators |
|
866 | 864 | endlist = escape_split(after, sep, maxsplit) |
|
867 | 865 | |
|
868 | 866 | # finish building the escaped value. we use endlist[0] because the first
|
869 | 867 | # part of the string sent in recursion is the rest of the escaped value. |
|
870 | 868 | unfinished += sep + endlist[0] |
|
871 | 869 | |
|
872 | 870 | return startlist + [unfinished] + endlist[1:] # put together all the parts |
|
873 | 871 | |
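A worked example of the escaping behavior (note the doubled backslash needed in Python source)::

    escape_split('foo\\, bar,baz')     # -> ['foo, bar', 'baz']
    escape_split('a,b,c')              # -> ['a', 'b', 'c']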
|
874 | 872 | |
|
875 | 873 | class OptionalAttr(object): |
|
876 | 874 | """ |
|
877 | 875 | Special optional option that references another attribute. Example::
|
878 | 876 | |
|
879 | 877 | def test(apiuser, userid=Optional(OAttr('apiuser'))):
|
880 | 878 | user = Optional.extract(userid) |
|
881 | 879 | # calls |
|
882 | 880 | |
|
883 | 881 | """ |
|
884 | 882 | |
|
885 | 883 | def __init__(self, attr_name): |
|
886 | 884 | self.attr_name = attr_name |
|
887 | 885 | |
|
888 | 886 | def __repr__(self): |
|
889 | 887 | return '<OptionalAttr:%s>' % self.attr_name |
|
890 | 888 | |
|
891 | 889 | def __call__(self): |
|
892 | 890 | return self |
|
893 | 891 | |
|
894 | 892 | |
|
895 | 893 | # alias |
|
896 | 894 | OAttr = OptionalAttr |
|
897 | 895 | |
|
898 | 896 | |
|
899 | 897 | class Optional(object): |
|
900 | 898 | """ |
|
901 | 899 | Defines an optional parameter:: |
|
902 | 900 | |
|
903 | 901 | param = param.getval() if isinstance(param, Optional) else param |
|
904 | 902 | param = param() if isinstance(param, Optional) else param |
|
905 | 903 | |
|
906 | 904 | is equivalent to::
|
907 | 905 | |
|
908 | 906 | param = Optional.extract(param) |
|
909 | 907 | |
|
910 | 908 | """ |
|
911 | 909 | |
|
912 | 910 | def __init__(self, type_): |
|
913 | 911 | self.type_ = type_ |
|
914 | 912 | |
|
915 | 913 | def __repr__(self): |
|
916 | 914 | return '<Optional:%s>' % self.type_.__repr__() |
|
917 | 915 | |
|
918 | 916 | def __call__(self): |
|
919 | 917 | return self.getval() |
|
920 | 918 | |
|
921 | 919 | def getval(self): |
|
922 | 920 | """ |
|
923 | 921 | returns value from this Optional instance |
|
924 | 922 | """ |
|
925 | 923 | if isinstance(self.type_, OAttr): |
|
926 | 924 | # use params name |
|
927 | 925 | return self.type_.attr_name |
|
928 | 926 | return self.type_ |
|
929 | 927 | |
|
930 | 928 | @classmethod |
|
931 | 929 | def extract(cls, val): |
|
932 | 930 | """ |
|
933 | 931 | Extracts value from Optional() instance |
|
934 | 932 | |
|
935 | 933 | :param val: |
|
936 | 934 | :return: the original value if it's not an Optional instance,

937 | 935 | otherwise the value of the instance
|
938 | 936 | """ |
|
939 | 937 | if isinstance(val, cls): |
|
940 | 938 | return val.getval() |
|
941 | 939 | return val |
|
942 | 940 | |
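Putting OAttr and Optional together, a usage sketch (get_user is a hypothetical API function)::

    def get_user(apiuser, userid=Optional(OAttr('apiuser'))):
        userid = Optional.extract(userid)
        # -> 'apiuser' when the caller omitted userid, the raw value otherwise

    assert Optional.extract(Optional(5)) == 5
    assert Optional.extract(5) == 5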
|
943 | 941 | |
|
944 | def get_routes_generator_for_server_url(server_url): | |
|
945 | parsed_url = urlobject.URLObject(server_url) | |
|
946 | netloc = safe_str(parsed_url.netloc) | |
|
947 | script_name = safe_str(parsed_url.path) | |
|
948 | ||
|
949 | if ':' in netloc: | |
|
950 | server_name, server_port = netloc.split(':') | |
|
951 | else: | |
|
952 | server_name = netloc | |
|
953 | server_port = (parsed_url.scheme == 'https' and '443' or '80') | |
|
954 | ||
|
955 | environ = { | |
|
956 | 'REQUEST_METHOD': 'GET', | |
|
957 | 'PATH_INFO': '/', | |
|
958 | 'SERVER_NAME': server_name, | |
|
959 | 'SERVER_PORT': server_port, | |
|
960 | 'SCRIPT_NAME': script_name, | |
|
961 | } | |
|
962 | if parsed_url.scheme == 'https': | |
|
963 | environ['HTTPS'] = 'on' | |
|
964 | environ['wsgi.url_scheme'] = 'https' | |
|
965 | ||
|
966 | return routes.util.URLGenerator(rhodecode.CONFIG['routes.map'], environ) | |
|
967 | ||
|
968 | ||
|
969 | 942 | def glob2re(pat): |
|
970 | 943 | """ |
|
971 | 944 | Translate a shell PATTERN to a regular expression. |
|
972 | 945 | |
|
973 | 946 | There is no way to quote meta-characters. |
|
974 | 947 | """ |
|
975 | 948 | |
|
976 | 949 | i, n = 0, len(pat) |
|
977 | 950 | res = '' |
|
978 | 951 | while i < n: |
|
979 | 952 | c = pat[i] |
|
980 | 953 | i = i+1 |
|
981 | 954 | if c == '*': |
|
982 | 955 | #res = res + '.*' |
|
983 | 956 | res = res + '[^/]*' |
|
984 | 957 | elif c == '?': |
|
985 | 958 | #res = res + '.' |
|
986 | 959 | res = res + '[^/]' |
|
987 | 960 | elif c == '[': |
|
988 | 961 | j = i |
|
989 | 962 | if j < n and pat[j] == '!': |
|
990 | 963 | j = j+1 |
|
991 | 964 | if j < n and pat[j] == ']': |
|
992 | 965 | j = j+1 |
|
993 | 966 | while j < n and pat[j] != ']': |
|
994 | 967 | j = j+1 |
|
995 | 968 | if j >= n: |
|
996 | 969 | res = res + '\\[' |
|
997 | 970 | else: |
|
998 | 971 | stuff = pat[i:j].replace('\\','\\\\') |
|
999 | 972 | i = j+1 |
|
1000 | 973 | if stuff[0] == '!': |
|
1001 | 974 | stuff = '^' + stuff[1:] |
|
1002 | 975 | elif stuff[0] == '^': |
|
1003 | 976 | stuff = '\\' + stuff |
|
1004 | 977 | res = '%s[%s]' % (res, stuff) |
|
1005 | 978 | else: |
|
1006 | 979 | res = res + re.escape(c) |
|
1007 | 980 | return res + '\Z(?ms)' |
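Behavior sketch: '*' and '?' deliberately stop at path separators, and Python 2 (which this codebase targets) accepts the trailing inline flags::

    pattern = re.compile(glob2re('*.py'))
    assert pattern.match('setup.py')
    assert not pattern.match('docs/conf.py')   # '*' does not cross '/'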
@@ -1,450 +1,456 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import json |
|
22 | 22 | import multiprocessing |
|
23 | 23 | import os |
|
24 | 24 | |
|
25 | 25 | import mock |
|
26 | 26 | import py |
|
27 | 27 | import pytest |
|
28 | 28 | |
|
29 | 29 | from rhodecode.lib import caching_query |
|
30 | 30 | from rhodecode.lib import utils |
|
31 | 31 | from rhodecode.lib.utils2 import md5 |
|
32 | 32 | from rhodecode.model import settings |
|
33 | 33 | from rhodecode.model import db |
|
34 | 34 | from rhodecode.model import meta |
|
35 | 35 | from rhodecode.model.repo import RepoModel |
|
36 | 36 | from rhodecode.model.repo_group import RepoGroupModel |
|
37 | 37 | from rhodecode.model.scm import ScmModel |
|
38 | 38 | from rhodecode.model.settings import UiSetting, SettingsModel |
|
39 | 39 | from rhodecode.tests.fixture import Fixture |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | fixture = Fixture() |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | def extract_hooks(config): |
|
46 | 46 | """Return a dictionary with the hook entries of the given config.""" |
|
47 | 47 | hooks = {} |
|
48 | 48 | config_items = config.serialize() |
|
49 | 49 | for section, name, value in config_items: |
|
50 | 50 | if section != 'hooks': |
|
51 | 51 | continue |
|
52 | 52 | hooks[name] = value |
|
53 | 53 | |
|
54 | 54 | return hooks |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | def disable_hooks(request, hooks): |
|
58 | 58 | """Disables the given hooks from the UI settings.""" |
|
59 | 59 | session = meta.Session() |
|
60 | 60 | |
|
61 | 61 | model = SettingsModel() |
|
62 | 62 | for hook_key in hooks: |
|
63 | 63 | sett = model.get_ui_by_key(hook_key) |
|
64 | 64 | sett.ui_active = False |
|
65 | 65 | session.add(sett) |
|
66 | 66 | |
|
67 | 67 | # Invalidate cache |
|
68 | 68 | ui_settings = session.query(db.RhodeCodeUi).options( |
|
69 | 69 | caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings')) |
|
70 | 70 | ui_settings.invalidate() |
|
71 | 71 | |
|
72 | 72 | ui_settings = session.query(db.RhodeCodeUi).options( |
|
73 | 73 | caching_query.FromCache( |
|
74 | 74 | 'sql_cache_short', 'get_hook_settings', 'get_hook_settings')) |
|
75 | 75 | ui_settings.invalidate() |
|
76 | 76 | |
|
77 | 77 | @request.addfinalizer |
|
78 | 78 | def rollback(): |
|
79 | 79 | session.rollback() |
|
80 | 80 | |
|
81 | 81 | |
|
82 | 82 | HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH |
|
83 | 83 | HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH |
|
84 | 84 | HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH |
|
85 | 85 | HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL |
|
86 | 86 | HOOK_PULL = db.RhodeCodeUi.HOOK_PULL |
|
87 | 87 | HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE |
|
88 | 88 | HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY |
|
89 | 89 | |
|
90 | 90 | HG_HOOKS = frozenset( |
|
91 | 91 | (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, |
|
92 | 92 | HOOK_REPO_SIZE, HOOK_PUSH_KEY)) |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | @pytest.mark.parametrize('disabled_hooks,expected_hooks', [ |
|
96 | 96 | ([], HG_HOOKS), |
|
97 | 97 | (HG_HOOKS, []), |
|
98 | 98 | |
|
99 | 99 | ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]), |
|
100 | 100 | |
|
101 | 101 | # When a pull/push hook is disabled, its pre-pull/push counterpart should |
|
102 | 102 | # be disabled too. |
|
103 | 103 | ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]), |
|
104 | 104 | ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE, |
|
105 | 105 | HOOK_PUSH_KEY]), |
|
106 | 106 | ]) |
|
107 | 107 | def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks, |
|
108 | 108 | expected_hooks): |
|
109 | 109 | disable_hooks(request, disabled_hooks) |
|
110 | 110 | |
|
111 | 111 | config = utils.make_db_config() |
|
112 | 112 | hooks = extract_hooks(config) |
|
113 | 113 | |
|
114 | 114 | assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks) |
|
115 | 115 | |
|
116 | 116 | |
|
117 | 117 | @pytest.mark.parametrize('disabled_hooks,expected_hooks', [ |
|
118 | 118 | ([], ['pull', 'push']), |
|
119 | 119 | ([HOOK_PUSH], ['pull']), |
|
120 | 120 | ([HOOK_PULL], ['push']), |
|
121 | 121 | ([HOOK_PULL, HOOK_PUSH], []), |
|
122 | 122 | ]) |
|
123 | 123 | def test_get_enabled_hook_classes(disabled_hooks, expected_hooks): |
|
124 | 124 | hook_keys = (HOOK_PUSH, HOOK_PULL) |
|
125 | 125 | ui_settings = [ |
|
126 | 126 | ('hooks', key, 'some value', key not in disabled_hooks) |
|
127 | 127 | for key in hook_keys] |
|
128 | 128 | |
|
129 | 129 | result = utils.get_enabled_hook_classes(ui_settings) |
|
130 | 130 | assert sorted(result) == expected_hooks |
|
131 | 131 | |
|
132 | 132 | |
|
133 | 133 | def test_get_filesystem_repos_finds_repos(tmpdir, baseapp): |
|
134 | 134 | _stub_git_repo(tmpdir.ensure('repo', dir=True)) |
|
135 | 135 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
136 | 136 | assert repos == [('repo', ('git', tmpdir.join('repo')))] |
|
137 | 137 | |
|
138 | 138 | |
|
139 | 139 | def test_get_filesystem_repos_skips_directories(tmpdir, baseapp): |
|
140 | 140 | tmpdir.ensure('not-a-repo', dir=True) |
|
141 | 141 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
142 | 142 | assert repos == [] |
|
143 | 143 | |
|
144 | 144 | |
|
145 | 145 | def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp): |
|
146 | 146 | _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True)) |
|
147 | 147 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
148 | 148 | assert repos == [] |
|
149 | 149 | |
|
150 | 150 | |
|
151 | 151 | def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp): |
|
152 | 152 | _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True)) |
|
153 | 153 | repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True)) |
|
154 | 154 | assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))] |
|
155 | 155 | |
|
156 | 156 | |
|
157 | 157 | def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir): |
|
158 | 158 | _stub_git_repo(tmpdir.ensure('.repo', dir=True)) |
|
159 | 159 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
160 | 160 | assert repos == [] |
|
161 | 161 | |
|
162 | 162 | |
|
163 | 163 | def test_get_filesystem_repos_skips_files(tmpdir): |
|
164 | 164 | tmpdir.ensure('test-file') |
|
165 | 165 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
166 | 166 | assert repos == [] |
|
167 | 167 | |
|
168 | 168 | |
|
169 | 169 | def test_get_filesystem_repos_skips_removed_repositories(tmpdir): |
|
170 | 170 | removed_repo_name = 'rm__00000000_000000_000000__.stub' |
|
171 | 171 | assert utils.REMOVED_REPO_PAT.match(removed_repo_name) |
|
172 | 172 | _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True)) |
|
173 | 173 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
174 | 174 | assert repos == [] |
|
175 | 175 | |
|
176 | 176 | |
|
177 | 177 | def _stub_git_repo(repo_path): |
|
178 | 178 | """ |
|
179 | 179 | Make `repo_path` look like a Git repository. |
|
180 | 180 | """ |
|
181 | 181 | repo_path.ensure('.git', dir=True) |
|
182 | 182 | |
|
183 | 183 | |
|
184 | 184 | @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode']) |
|
185 | 185 | def test_get_dirpaths_returns_all_paths(tmpdir, str_class): |
|
186 | 186 | tmpdir.ensure('test-file') |
|
187 | 187 | dirpaths = utils._get_dirpaths(str_class(tmpdir)) |
|
188 | 188 | assert dirpaths == ['test-file'] |
|
189 | 189 | |
|
190 | 190 | |
|
191 | 191 | def test_get_dirpaths_returns_all_paths_bytes( |
|
192 | 192 | tmpdir, platform_encodes_filenames): |
|
193 | 193 | if platform_encodes_filenames: |
|
194 | 194 | pytest.skip("This platform seems to encode filenames.") |
|
195 | 195 | tmpdir.ensure('repo-a-umlaut-\xe4') |
|
196 | 196 | dirpaths = utils._get_dirpaths(str(tmpdir)) |
|
197 | 197 | assert dirpaths == ['repo-a-umlaut-\xe4'] |
|
198 | 198 | |
|
199 | 199 | |
|
200 | 200 | def test_get_dirpaths_skips_paths_it_cannot_decode( |
|
201 | 201 | tmpdir, platform_encodes_filenames): |
|
202 | 202 | if platform_encodes_filenames: |
|
203 | 203 | pytest.skip("This platform seems to encode filenames.") |
|
204 | 204 | path_with_latin1 = 'repo-a-umlaut-\xe4' |
|
205 | 205 | tmpdir.ensure(path_with_latin1) |
|
206 | 206 | dirpaths = utils._get_dirpaths(unicode(tmpdir)) |
|
207 | 207 | assert dirpaths == [] |
|
208 | 208 | |
|
209 | 209 | |
|
210 | 210 | @pytest.fixture(scope='session') |
|
211 | 211 | def platform_encodes_filenames(): |
|
212 | 212 | """ |
|
213 | 213 | Boolean indicator if the current platform changes filename encodings. |
|
214 | 214 | """ |
|
215 | 215 | path_with_latin1 = 'repo-a-umlaut-\xe4' |
|
216 | 216 | tmpdir = py.path.local.mkdtemp() |
|
217 | 217 | tmpdir.ensure(path_with_latin1) |
|
218 | 218 | read_path = tmpdir.listdir()[0].basename |
|
219 | 219 | tmpdir.remove() |
|
220 | 220 | return path_with_latin1 != read_path |
|
221 | 221 | |
|
222 | 222 | |
|
223 | 223 | |
|
224 | 224 | |
|
225 | 225 | def test_repo2db_mapper_groups(repo_groups): |
|
226 | 226 | session = meta.Session() |
|
227 | 227 | zombie_group, parent_group, child_group = repo_groups |
|
228 | 228 | zombie_path = os.path.join( |
|
229 | 229 | RepoGroupModel().repos_path, zombie_group.full_path) |
|
230 | 230 | os.rmdir(zombie_path) |
|
231 | 231 | |
|
232 | 232 | # Avoid removing test repos when calling repo2db_mapper |
|
233 | 233 | repo_list = { |
|
234 | 234 | repo.repo_name: 'test' for repo in session.query(db.Repository).all() |
|
235 | 235 | } |
|
236 | 236 | utils.repo2db_mapper(repo_list, remove_obsolete=True) |
|
237 | 237 | |
|
238 | 238 | groups_in_db = session.query(db.RepoGroup).all() |
|
239 | 239 | assert child_group in groups_in_db |
|
240 | 240 | assert parent_group in groups_in_db |
|
241 | 241 | assert zombie_path not in groups_in_db |
|
242 | 242 | |
|
243 | 243 | |
|
244 | 244 | def test_repo2db_mapper_enables_largefiles(backend): |
|
245 | 245 | repo = backend.create_repo() |
|
246 | 246 | repo_list = {repo.repo_name: 'test'} |
|
247 | 247 | with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock: |
|
248 | 248 | with mock.patch.multiple('rhodecode.model.scm.ScmModel', |
|
249 | 249 | install_git_hook=mock.DEFAULT, |
|
250 | 250 | install_svn_hooks=mock.DEFAULT): |
|
251 | 251 | utils.repo2db_mapper(repo_list, remove_obsolete=False) |
|
252 | 252 | _, kwargs = scm_mock.call_args |
|
253 | 253 | assert kwargs['config'].get('extensions', 'largefiles') == '' |
|
254 | 254 | |
|
255 | 255 | |
|
256 | 256 | @pytest.mark.backends("git", "svn") |
|
257 | 257 | def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend): |
|
258 | 258 | repo = backend.create_repo() |
|
259 | 259 | repo_list = {repo.repo_name: 'test'} |
|
260 | 260 | with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock: |
|
261 | 261 | utils.repo2db_mapper(repo_list, remove_obsolete=False) |
|
262 | 262 | install_hooks_mock.assert_called_once_with( |
|
263 | 263 | repo.scm_instance(), repo_type=backend.alias) |
|
264 | 264 | |
|
265 | 265 | |
|
266 | 266 | @pytest.mark.backends("git", "svn") |
|
267 | 267 | def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend): |
|
268 | 268 | repo = backend.create_repo() |
|
269 | 269 | RepoModel().delete(repo, fs_remove=False) |
|
270 | 270 | meta.Session().commit() |
|
271 | 271 | repo_list = {repo.repo_name: repo.scm_instance()} |
|
272 | 272 | with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock: |
|
273 | 273 | utils.repo2db_mapper(repo_list, remove_obsolete=False) |
|
274 | 274 | assert install_hooks_mock.call_count == 1 |
|
275 | 275 | install_hooks_args, _ = install_hooks_mock.call_args |
|
276 | 276 | assert install_hooks_args[0].name == repo.repo_name |
|
277 | 277 | |
|
278 | 278 | |
|
279 | 279 | class TestPasswordChanged(object): |
|
280 | 280 | def setup(self): |
|
281 | 281 | self.session = { |
|
282 | 282 | 'rhodecode_user': { |
|
283 | 283 | 'password': '0cc175b9c0f1b6a831c399e269772661' |
|
284 | 284 | } |
|
285 | 285 | } |
|
286 | 286 | self.auth_user = mock.Mock() |
|
287 | 287 | self.auth_user.userame = 'test' |
|
288 | 288 | self.auth_user.password = 'abc123' |
|
289 | 289 | |
|
290 | 290 | def test_returns_false_for_default_user(self): |
|
291 | 291 | self.auth_user.username = db.User.DEFAULT_USER |
|
292 | 292 | result = utils.password_changed(self.auth_user, self.session) |
|
293 | 293 | assert result is False |
|
294 | 294 | |
|
295 | 295 | def test_returns_false_if_password_was_not_changed(self): |
|
296 | 296 | self.session['rhodecode_user']['password'] = md5( |
|
297 | 297 | self.auth_user.password) |
|
298 | 298 | result = utils.password_changed(self.auth_user, self.session) |
|
299 | 299 | assert result is False |
|
300 | 300 | |
|
301 | 301 | def test_returns_true_if_password_was_changed(self): |
|
302 | 302 | result = utils.password_changed(self.auth_user, self.session) |
|
303 | 303 | assert result is True |
|
304 | 304 | |
|
305 | 305 | def test_returns_true_if_auth_user_password_is_empty(self): |
|
306 | 306 | self.auth_user.password = None |
|
307 | 307 | result = utils.password_changed(self.auth_user, self.session) |
|
308 | 308 | assert result is True |
|
309 | 309 | |
|
310 | 310 | def test_returns_true_if_session_password_is_empty(self): |
|
311 | 311 | self.session['rhodecode_user'].pop('password') |
|
312 | 312 | result = utils.password_changed(self.auth_user, self.session) |
|
313 | 313 | assert result is True |
|
314 | 314 | |
|
315 | 315 | |
|
316 | 316 | class TestReadOpensourceLicenses(object): |
|
317 | 317 | def test_success(self): |
|
318 | 318 | utils._license_cache = None |
|
319 | 319 | json_data = ''' |
|
320 | 320 | { |
|
321 | 321 | "python2.7-pytest-2.7.1": {"UNKNOWN": null}, |
|
322 | 322 | "python2.7-Markdown-2.6.2": { |
|
323 | 323 | "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause" |
|
324 | 324 | } |
|
325 | 325 | } |
|
326 | 326 | ''' |
|
327 | 327 | resource_string_patch = mock.patch.object( |
|
328 | 328 | utils.pkg_resources, 'resource_string', return_value=json_data) |
|
329 | 329 | with resource_string_patch: |
|
330 | 330 | result = utils.read_opensource_licenses() |
|
331 | 331 | assert result == json.loads(json_data) |
|
332 | 332 | |
|
333 | 333 | def test_caching(self): |
|
334 | 334 | utils._license_cache = { |
|
335 | 335 | "python2.7-pytest-2.7.1": { |
|
336 | 336 | "UNKNOWN": None |
|
337 | 337 | }, |
|
338 | 338 | "python2.7-Markdown-2.6.2": { |
|
339 | 339 | "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause" |
|
340 | 340 | } |
|
341 | 341 | } |
|
342 | 342 | resource_patch = mock.patch.object( |
|
343 | 343 | utils.pkg_resources, 'resource_string', side_effect=Exception) |
|
344 | 344 | json_patch = mock.patch.object( |
|
345 | 345 | utils.json, 'loads', side_effect=Exception) |
|
346 | 346 | |
|
347 | 347 | with resource_patch as resource_mock, json_patch as json_mock: |
|
348 | 348 | result = utils.read_opensource_licenses() |
|
349 | 349 | |
|
350 | 350 | assert resource_mock.call_count == 0 |
|
351 | 351 | assert json_mock.call_count == 0 |
|
352 | 352 | assert result == utils._license_cache |
|
353 | 353 | |
|
354 | 354 | def test_licenses_file_contains_no_unknown_licenses(self): |
|
355 | 355 | utils._license_cache = None |
|
356 | 356 | result = utils.read_opensource_licenses() |
|
357 | 357 | license_names = [] |
|
358 | 358 | for licenses in result.values(): |
|
359 | 359 | license_names.extend(licenses.keys()) |
|
360 | 360 | assert 'UNKNOWN' not in license_names |
|
361 | 361 | |
|
362 | 362 | |
|
363 | 363 | class TestMakeDbConfig(object): |
|
364 | 364 | def test_data_from_config_data_from_db_returned(self): |
|
365 | 365 | test_data = [ |
|
366 | 366 | ('section1', 'option1', 'value1'), |
|
367 | 367 | ('section2', 'option2', 'value2'), |
|
368 | 368 | ('section3', 'option3', 'value3'), |
|
369 | 369 | ] |
|
370 | 370 | with mock.patch.object(utils, 'config_data_from_db') as config_mock: |
|
371 | 371 | config_mock.return_value = test_data |
|
372 | 372 | kwargs = {'clear_session': False, 'repo': 'test_repo'} |
|
373 | 373 | result = utils.make_db_config(**kwargs) |
|
374 | 374 | config_mock.assert_called_once_with(**kwargs) |
|
375 | 375 | for section, option, expected_value in test_data: |
|
376 | 376 | value = result.get(section, option) |
|
377 | 377 | assert value == expected_value |
|
378 | 378 | |
|
379 | 379 | |
|
380 | 380 | class TestConfigDataFromDb(object): |
|
381 | 381 | def test_config_data_from_db_returns_active_settings(self): |
|
382 | 382 | test_data = [ |
|
383 | 383 | UiSetting('section1', 'option1', 'value1', True), |
|
384 | 384 | UiSetting('section2', 'option2', 'value2', True), |
|
385 | 385 | UiSetting('section3', 'option3', 'value3', False), |
|
386 | 386 | ] |
|
387 | 387 | repo_name = 'test_repo' |
|
388 | 388 | |
|
389 | 389 | model_patch = mock.patch.object(settings, 'VcsSettingsModel') |
|
390 | 390 | hooks_patch = mock.patch.object( |
|
391 | 391 | utils, 'get_enabled_hook_classes', |
|
392 | 392 | return_value=['pull', 'push', 'repo_size']) |
|
393 | 393 | with model_patch as model_mock, hooks_patch: |
|
394 | 394 | instance_mock = mock.Mock() |
|
395 | 395 | model_mock.return_value = instance_mock |
|
396 | 396 | instance_mock.get_ui_settings.return_value = test_data |
|
397 | 397 | result = utils.config_data_from_db( |
|
398 | 398 | clear_session=False, repo=repo_name) |
|
399 | 399 | |
|
400 | 400 | self._assert_repo_name_passed(model_mock, repo_name) |
|
401 | 401 | |
|
402 | 402 | expected_result = [ |
|
403 | 403 | ('section1', 'option1', 'value1'), |
|
404 | 404 | ('section2', 'option2', 'value2'), |
|
405 | 405 | ] |
|
406 | 406 | assert result == expected_result |
|
407 | 407 | |
|
408 | 408 | def _assert_repo_name_passed(self, model_mock, repo_name): |
|
409 | 409 | assert model_mock.call_count == 1 |
|
410 | 410 | call_args, call_kwargs = model_mock.call_args |
|
411 | 411 | assert call_kwargs['repo'] == repo_name |
|
412 | 412 | |
|
413 | 413 | |
|
414 | 414 | class TestIsDirWritable(object): |
|
415 | 415 | def test_returns_false_when_not_writable(self): |
|
416 | 416 | with mock.patch('__builtin__.open', side_effect=OSError): |
|
417 | 417 | assert not utils._is_dir_writable('/stub-path') |
|
418 | 418 | |
|
419 | 419 | def test_returns_true_when_writable(self, tmpdir): |
|
420 | 420 | assert utils._is_dir_writable(str(tmpdir)) |
|
421 | 421 | |
|
422 | 422 | def test_is_safe_against_race_conditions(self, tmpdir): |
|
423 | 423 | workers = multiprocessing.Pool() |
|
424 | 424 | directories = [str(tmpdir)] * 10 |
|
425 | 425 | workers.map(utils._is_dir_writable, directories) |
|
426 | 426 | |
|
427 | 427 | |
|
428 | 428 | class TestGetEnabledHooks(object): |
|
429 | 429 | def test_only_active_hooks_are_enabled(self): |
|
430 | 430 | ui_settings = [ |
|
431 | 431 | UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True), |
|
432 | 432 | UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True), |
|
433 | 433 | UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False) |
|
434 | 434 | ] |
|
435 | 435 | result = utils.get_enabled_hook_classes(ui_settings) |
|
436 | 436 | assert result == ['push', 'repo_size'] |
|
437 | 437 | |
|
438 | 438 | def test_all_hooks_are_enabled(self): |
|
439 | 439 | ui_settings = [ |
|
440 | 440 | UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True), |
|
441 | 441 | UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True), |
|
442 | 442 | UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True) |
|
443 | 443 | ] |
|
444 | 444 | result = utils.get_enabled_hook_classes(ui_settings) |
|
445 | 445 | assert result == ['push', 'repo_size', 'pull'] |
|
446 | 446 | |
|
447 | 447 | def test_no_enabled_hooks_when_no_hook_settings_are_found(self): |
|
448 | 448 | ui_settings = [] |
|
449 | 449 | result = utils.get_enabled_hook_classes(ui_settings) |
|
450 | 450 | assert result == [] |
|
451 | ||
|
452 | ||
|
453 | def test_obfuscate_url_pw(): | |
|
454 | from rhodecode.lib.utils2 import obfuscate_url_pw | |
|
455 | engine = u'/home/repos/malmö' | |
|
456 | assert obfuscate_url_pw(engine) No newline at end of file |
@@ -1,1837 +1,1836 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import collections |
|
22 | 22 | import datetime |
|
23 | 23 | import hashlib |
|
24 | 24 | import os |
|
25 | 25 | import re |
|
26 | 26 | import pprint |
|
27 | 27 | import shutil |
|
28 | 28 | import socket |
|
29 | 29 | import subprocess32 |
|
30 | 30 | import time |
|
31 | 31 | import uuid |
|
32 | 32 | import dateutil.tz |
|
33 | 33 | |
|
34 | 34 | import mock |
|
35 | 35 | import pyramid.testing |
|
36 | 36 | import pytest |
|
37 | 37 | import colander |
|
38 | 38 | import requests |
|
39 | 39 | |
|
40 | 40 | import rhodecode |
|
41 | 41 | from rhodecode.lib.utils2 import AttributeDict |
|
42 | 42 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
43 | 43 | from rhodecode.model.comment import CommentsModel |
|
44 | 44 | from rhodecode.model.db import ( |
|
45 | 45 | PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup, |
|
46 | 46 | UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi) |
|
47 | 47 | from rhodecode.model.meta import Session |
|
48 | 48 | from rhodecode.model.pull_request import PullRequestModel |
|
49 | 49 | from rhodecode.model.repo import RepoModel |
|
50 | 50 | from rhodecode.model.repo_group import RepoGroupModel |
|
51 | 51 | from rhodecode.model.user import UserModel |
|
52 | 52 | from rhodecode.model.settings import VcsSettingsModel |
|
53 | 53 | from rhodecode.model.user_group import UserGroupModel |
|
54 | 54 | from rhodecode.model.integration import IntegrationModel |
|
55 | 55 | from rhodecode.integrations import integration_type_registry |
|
56 | 56 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
57 | 57 | from rhodecode.lib.utils import repo2db_mapper |
|
58 | 58 | from rhodecode.lib.vcs import create_vcsserver_proxy |
|
59 | 59 | from rhodecode.lib.vcs.backends import get_backend |
|
60 | 60 | from rhodecode.lib.vcs.nodes import FileNode |
|
61 | 61 | from rhodecode.tests import ( |
|
62 | 62 | login_user_session, get_new_dir, utils, TESTS_TMP_PATH, |
|
63 | 63 | TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN, |
|
64 | 64 | TEST_USER_REGULAR_PASS) |
|
65 | 65 | from rhodecode.tests.utils import CustomTestApp, set_anonymous_access |
|
66 | 66 | from rhodecode.tests.fixture import Fixture |
|
67 | 67 | |
|
68 | 68 | |
|
69 | 69 | def _split_comma(value): |
|
70 | 70 | return value.split(',') |
|
71 | 71 | |
|
72 | 72 | |
|
73 | 73 | def pytest_addoption(parser): |
|
74 | 74 | parser.addoption( |
|
75 | 75 | '--keep-tmp-path', action='store_true', |
|
76 | 76 | help="Keep the test temporary directories") |
|
77 | 77 | parser.addoption( |
|
78 | 78 | '--backends', action='store', type=_split_comma, |
|
79 | 79 | default=['git', 'hg', 'svn'], |
|
80 | 80 | help="Select which backends to test for backend specific tests.") |
|
81 | 81 | parser.addoption( |
|
82 | 82 | '--dbs', action='store', type=_split_comma, |
|
83 | 83 | default=['sqlite'], |
|
84 | 84 | help="Select which database to test for database specific tests. " |
|
85 | 85 | "Possible options are sqlite,postgres,mysql") |
|
86 | 86 | parser.addoption( |
|
87 | 87 | '--appenlight', '--ae', action='store_true', |
|
88 | 88 | help="Track statistics in appenlight.") |
|
89 | 89 | parser.addoption( |
|
90 | 90 | '--appenlight-api-key', '--ae-key', |
|
91 | 91 | help="API key for Appenlight.") |
|
92 | 92 | parser.addoption( |
|
93 | 93 | '--appenlight-url', '--ae-url', |
|
94 | 94 | default="https://ae.rhodecode.com", |
|
95 | 95 | help="Appenlight service URL, defaults to https://ae.rhodecode.com") |
|
96 | 96 | parser.addoption( |
|
97 | 97 | '--sqlite-connection-string', action='store', |
|
98 | 98 | default='', help="Connection string for the dbs tests with SQLite") |
|
99 | 99 | parser.addoption( |
|
100 | 100 | '--postgres-connection-string', action='store', |
|
101 | 101 | default='', help="Connection string for the dbs tests with Postgres") |
|
102 | 102 | parser.addoption( |
|
103 | 103 | '--mysql-connection-string', action='store', |
|
104 | 104 | default='', help="Connection string for the dbs tests with MySQL") |
|
105 | 105 | parser.addoption( |
|
106 | 106 | '--repeat', type=int, default=100, |
|
107 | 107 | help="Number of repetitions in performance tests.") |
|
108 | 108 | |
|
109 | 109 | |
|
110 | 110 | def pytest_configure(config): |
|
111 | # Apply the kombu patch early on, needed for test discovery on Python 2.7.11 
|
112 | 111 | from rhodecode.config import patches |
|
113 | 112 | |
|
114 | 113 | |
|
115 | 114 | def pytest_collection_modifyitems(session, config, items): |
|
116 | 115 | # nottest marked, compare nose, used for transition from nose to pytest |
|
117 | 116 | remaining = [ |
|
118 | 117 | i for i in items if getattr(i.obj, '__test__', True)] |
|
119 | 118 | items[:] = remaining |
|
120 | 119 | |
|
121 | 120 | |
|
122 | 121 | def pytest_generate_tests(metafunc): |
|
123 | 122 | # Support test generation based on --backend parameter |
|
124 | 123 | if 'backend_alias' in metafunc.fixturenames: |
|
125 | 124 | backends = get_backends_from_metafunc(metafunc) |
|
126 | 125 | scope = None |
|
127 | 126 | if not backends: |
|
128 | 127 | pytest.skip("Not enabled for any of selected backends") |
|
129 | 128 | metafunc.parametrize('backend_alias', backends, scope=scope) |
|
130 | 129 | elif hasattr(metafunc.function, 'backends'): |
|
131 | 130 | backends = get_backends_from_metafunc(metafunc) |
|
132 | 131 | if not backends: |
|
133 | 132 | pytest.skip("Not enabled for any of selected backends") |
|
134 | 133 | |
|
135 | 134 | |
|
136 | 135 | def get_backends_from_metafunc(metafunc): |
|
137 | 136 | requested_backends = set(metafunc.config.getoption('--backends')) |
|
138 | 137 | if hasattr(metafunc.function, 'backends'): |
|
139 | 138 | # Supported backends by this test function, created from |
|
140 | 139 | # pytest.mark.backends |
|
141 | 140 | backends = metafunc.function.backends.args |
|
142 | 141 | elif hasattr(metafunc.cls, 'backend_alias'): |
|
143 | 142 | # Support class attribute "backend_alias", this is mainly |
|
144 | 143 | # for legacy reasons for tests not yet using pytest.mark.backends |
|
145 | 144 | backends = [metafunc.cls.backend_alias] |
|
146 | 145 | else: |
|
147 | 146 | backends = metafunc.config.getoption('--backends') |
|
148 | 147 | return requested_backends.intersection(backends) |
|
149 | 148 | |
|
150 | 149 | |
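For reference, the marker consumed here is the same one used by tests elsewhere in this changeset, e.g.::

    @pytest.mark.backends("git", "svn")
    def test_only_for_git_and_svn(backend):
        assert backend.alias in ('git', 'svn')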
|
151 | 150 | @pytest.fixture(scope='session', autouse=True) |
|
152 | 151 | def activate_example_rcextensions(request): |
|
153 | 152 | """ |
|
154 | 153 | Patch in an example rcextensions module which verifies passed in kwargs. |
|
155 | 154 | """ |
|
156 | 155 | from rhodecode.tests.other import example_rcextensions |
|
157 | 156 | |
|
158 | 157 | old_extensions = rhodecode.EXTENSIONS |
|
159 | 158 | rhodecode.EXTENSIONS = example_rcextensions |
|
160 | 159 | |
|
161 | 160 | @request.addfinalizer |
|
162 | 161 | def cleanup(): |
|
163 | 162 | rhodecode.EXTENSIONS = old_extensions |
|
164 | 163 | |
|
165 | 164 | |
|
166 | 165 | @pytest.fixture |
|
167 | 166 | def capture_rcextensions(): |
|
168 | 167 | """ |
|
169 | 168 | Returns the recorded calls to entry points in rcextensions. |
|
170 | 169 | """ |
|
171 | 170 | calls = rhodecode.EXTENSIONS.calls |
|
172 | 171 | calls.clear() |
|
173 | 172 | # Note: At this moment, it is still the empty dict, but that will |
|
174 | 173 | # be filled during the test run and since it is a reference this |
|
175 | 174 | # is enough to make it work. |
|
176 | 175 | return calls |
|
177 | 176 | |
|
178 | 177 | |
|
179 | 178 | @pytest.fixture(scope='session') |
|
180 | 179 | def http_environ_session(): |
|
181 | 180 | """ |
|
182 | 181 | Allow to use "http_environ" in session scope. |
|
183 | 182 | """ |
|
184 | 183 | return http_environ( |
|
185 | 184 | http_host_stub=http_host_stub()) |
|
186 | 185 | |
|
187 | 186 | |
|
188 | 187 | @pytest.fixture |
|
189 | 188 | def http_host_stub(): |
|
190 | 189 | """ |
|
191 | 190 | Value of HTTP_HOST in the test run. |
|
192 | 191 | """ |
|
193 | 192 | return 'example.com:80' |
|
194 | 193 | |
|
195 | 194 | |
|
196 | 195 | @pytest.fixture |
|
197 | 196 | def http_host_only_stub(): |
|
198 | 197 | """ |
|
199 | 198 | Value of HTTP_HOST without the port in the test run. 
|
200 | 199 | """ |
|
201 | 200 | return http_host_stub().split(':')[0] |
|
202 | 201 | |
|
203 | 202 | |
|
204 | 203 | @pytest.fixture |
|
205 | 204 | def http_environ(http_host_stub): |
|
206 | 205 | """ |
|
207 | 206 | HTTP extra environ keys. |
|
208 | 207 | |
|
209 | 208 | Used by the test application as well as for setting up the pylons 
|
210 | 209 | environment. In the case of the fixture "app" it should be possible |
|
211 | 210 | to override this for a specific test case. |
|
212 | 211 | """ |
|
213 | 212 | return { |
|
214 | 213 | 'SERVER_NAME': http_host_only_stub(), |
|
215 | 214 | 'SERVER_PORT': http_host_stub.split(':')[1], |
|
216 | 215 | 'HTTP_HOST': http_host_stub, |
|
217 | 216 | 'HTTP_USER_AGENT': 'rc-test-agent', |
|
218 | 217 | 'REQUEST_METHOD': 'GET' |
|
219 | 218 | } |
|
220 | 219 | |
|
221 | 220 | |
|
222 | 221 | @pytest.fixture(scope='function') |
|
223 | 222 | def app(request, config_stub, baseapp, http_environ): |
|
224 | 223 | app = CustomTestApp( |
|
225 | 224 | baseapp, |
|
226 | 225 | extra_environ=http_environ) |
|
227 | 226 | if request.cls: |
|
228 | 227 | request.cls.app = app |
|
229 | 228 | return app |
|
230 | 229 | |
|
231 | 230 | |
|
232 | 231 | @pytest.fixture(scope='session') |
|
233 | 232 | def app_settings(baseapp, ini_config): |
|
234 | 233 | """ |
|
235 | 234 | Settings dictionary used to create the app. |
|
236 | 235 | |
|
237 | 236 | Parses the ini file and passes the result through the sanitize and apply |
|
238 | 237 | defaults mechanism in `rhodecode.config.middleware`. |
|
239 | 238 | """ |
|
240 | 239 | return baseapp.config.get_settings() |
|
241 | 240 | |
|
242 | 241 | |
|
243 | 242 | LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user')) |
|
244 | 243 | |
|
245 | 244 | |
|
246 | 245 | def _autologin_user(app, *args): |
|
247 | 246 | session = login_user_session(app, *args) |
|
248 | 247 | csrf_token = rhodecode.lib.auth.get_csrf_token(session) |
|
249 | 248 | return LoginData(csrf_token, session['rhodecode_user']) |
|
250 | 249 | |
|
251 | 250 | |
|
252 | 251 | @pytest.fixture |
|
253 | 252 | def autologin_user(app): |
|
254 | 253 | """ |
|
255 | 254 | Utility fixture which makes sure that the admin user is logged in |
|
256 | 255 | """ |
|
257 | 256 | return _autologin_user(app) |
|
258 | 257 | |
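A short usage sketch (the URL is illustrative)::

    def test_admin_settings(app, autologin_user):
        app.get('/_admin/settings', status=200)
        csrf_token = autologin_user.csrf_token    # e.g. for POST forms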
|
259 | 258 | |
|
260 | 259 | @pytest.fixture |
|
261 | 260 | def autologin_regular_user(app): |
|
262 | 261 | """ |
|
263 | 262 | Utility fixture which makes sure that the regular user is logged in |
|
264 | 263 | """ |
|
265 | 264 | return _autologin_user( |
|
266 | 265 | app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
267 | 266 | |
|
268 | 267 | |
|
269 | 268 | @pytest.fixture(scope='function') |
|
270 | 269 | def csrf_token(request, autologin_user): |
|
271 | 270 | return autologin_user.csrf_token |
|
272 | 271 | |
|
273 | 272 | |
|
274 | 273 | @pytest.fixture(scope='function') |
|
275 | 274 | def xhr_header(request): |
|
276 | 275 | return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'} |
|
277 | 276 | |
|
278 | 277 | |
|
279 | 278 | @pytest.fixture |
|
280 | 279 | def real_crypto_backend(monkeypatch): |
|
281 | 280 | """ |
|
282 | 281 | Switch the production crypto backend on for this test. |
|
283 | 282 | |
|
284 | 283 | During the test run the crypto backend is replaced with a faster |
|
285 | 284 | implementation based on the MD5 algorithm. |
|
286 | 285 | """ |
|
287 | 286 | monkeypatch.setattr(rhodecode, 'is_test', False) |
|
288 | 287 | |
|
289 | 288 | |
|
290 | 289 | @pytest.fixture(scope='class') |
|
291 | 290 | def index_location(request, baseapp): |
|
292 | 291 | index_location = baseapp.config.get_settings()['search.location'] |
|
293 | 292 | if request.cls: |
|
294 | 293 | request.cls.index_location = index_location |
|
295 | 294 | return index_location |
|
296 | 295 | |
|
297 | 296 | |
|
298 | 297 | @pytest.fixture(scope='session', autouse=True) |
|
299 | 298 | def tests_tmp_path(request): |
|
300 | 299 | """ |
|
301 | 300 | Create temporary directory to be used during the test session. |
|
302 | 301 | """ |
|
303 | 302 | if not os.path.exists(TESTS_TMP_PATH): |
|
304 | 303 | os.makedirs(TESTS_TMP_PATH) |
|
305 | 304 | |
|
306 | 305 | if not request.config.getoption('--keep-tmp-path'): |
|
307 | 306 | @request.addfinalizer |
|
308 | 307 | def remove_tmp_path(): |
|
309 | 308 | shutil.rmtree(TESTS_TMP_PATH) |
|
310 | 309 | |
|
311 | 310 | return TESTS_TMP_PATH |
|
312 | 311 | |
|
313 | 312 | |
|
314 | 313 | @pytest.fixture |
|
315 | 314 | def test_repo_group(request): |
|
316 | 315 | """ |
|
317 | 316 | Create a temporary repository group, and destroy it after |
|
318 | 317 | usage automatically |
|
319 | 318 | """ |
|
320 | 319 | fixture = Fixture() |
|
321 | 320 | repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '') |
|
322 | 321 | repo_group = fixture.create_repo_group(repogroupid) |
|
323 | 322 | |
|
324 | 323 | def _cleanup(): |
|
325 | 324 | fixture.destroy_repo_group(repogroupid) |
|
326 | 325 | |
|
327 | 326 | request.addfinalizer(_cleanup) |
|
328 | 327 | return repo_group |
|
329 | 328 | |
|
330 | 329 | |
|
331 | 330 | @pytest.fixture |
|
332 | 331 | def test_user_group(request): |
|
333 | 332 | """ |
|
334 | 333 | Create a temporary user group, and destroy it after |
|
335 | 334 | usage automatically |
|
336 | 335 | """ |
|
337 | 336 | fixture = Fixture() |
|
338 | 337 | usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '') |
|
339 | 338 | user_group = fixture.create_user_group(usergroupid) |
|
340 | 339 | |
|
341 | 340 | def _cleanup(): |
|
342 | 341 | fixture.destroy_user_group(user_group) |
|
343 | 342 | |
|
344 | 343 | request.addfinalizer(_cleanup) |
|
345 | 344 | return user_group |
|
346 | 345 | |
|
347 | 346 | |
|
348 | 347 | @pytest.fixture(scope='session') |
|
349 | 348 | def test_repo(request): |
|
350 | 349 | container = TestRepoContainer() |
|
351 | 350 | request.addfinalizer(container._cleanup) |
|
352 | 351 | return container |
|
353 | 352 | |
|
354 | 353 | |
|
355 | 354 | class TestRepoContainer(object): |
|
356 | 355 | """ |
|
357 | 356 | Container for test repositories which are used read only. |
|
358 | 357 | |
|
359 | 358 | Repositories will be created on demand and re-used during the lifetime |
|
360 | 359 | of this object. |
|
361 | 360 | |
|
362 | 361 | Usage to get the svn test repository "minimal":: |
|
363 | 362 | |
|
364 | 363 | test_repo = TestRepoContainer() 
|
365 | 364 | repo = test_repo('minimal', 'svn') |
|
366 | 365 | |
|
367 | 366 | """ |
|
368 | 367 | |
|
369 | 368 | dump_extractors = { |
|
370 | 369 | 'git': utils.extract_git_repo_from_dump, |
|
371 | 370 | 'hg': utils.extract_hg_repo_from_dump, |
|
372 | 371 | 'svn': utils.extract_svn_repo_from_dump, |
|
373 | 372 | } |
|
374 | 373 | |
|
375 | 374 | def __init__(self): |
|
376 | 375 | self._cleanup_repos = [] |
|
377 | 376 | self._fixture = Fixture() |
|
378 | 377 | self._repos = {} |
|
379 | 378 | |
|
380 | 379 | def __call__(self, dump_name, backend_alias, config=None): |
|
381 | 380 | key = (dump_name, backend_alias) |
|
382 | 381 | if key not in self._repos: |
|
383 | 382 | repo = self._create_repo(dump_name, backend_alias, config) |
|
384 | 383 | self._repos[key] = repo.repo_id |
|
385 | 384 | return Repository.get(self._repos[key]) |
|
386 | 385 | |
|
387 | 386 | def _create_repo(self, dump_name, backend_alias, config): |
|
388 | 387 | repo_name = '%s-%s' % (backend_alias, dump_name) |
|
389 | 388 | backend_class = get_backend(backend_alias) |
|
390 | 389 | dump_extractor = self.dump_extractors[backend_alias] |
|
391 | 390 | repo_path = dump_extractor(dump_name, repo_name) |
|
392 | 391 | |
|
393 | 392 | vcs_repo = backend_class(repo_path, config=config) |
|
394 | 393 | repo2db_mapper({repo_name: vcs_repo}) |
|
395 | 394 | |
|
396 | 395 | repo = RepoModel().get_by_repo_name(repo_name) |
|
397 | 396 | self._cleanup_repos.append(repo_name) |
|
398 | 397 | return repo |
|
399 | 398 | |
|
400 | 399 | def _cleanup(self): |
|
401 | 400 | for repo_name in reversed(self._cleanup_repos): |
|
402 | 401 | self._fixture.destroy_repo(repo_name) |
|
403 | 402 | |
|
404 | 403 | |
|
405 | 404 | @pytest.fixture |
|
406 | 405 | def backend(request, backend_alias, baseapp, test_repo): |
|
407 | 406 | """ |
|
408 | 407 | Parametrized fixture which represents a single backend implementation. |
|
409 | 408 | |
|
410 | 409 | It respects the option `--backends` to focus the test run on specific |
|
411 | 410 | backend implementations. |
|
412 | 411 | |
|
413 | 412 | It also supports `pytest.mark.xfail_backends` to mark tests as failing |
|
414 | 413 | for specific backends. This is intended as a utility for incremental |
|
415 | 414 | development of a new backend implementation. |
|
416 | 415 | """ |
|
417 | 416 | if backend_alias not in request.config.getoption('--backends'): |
|
418 | 417 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
419 | 418 | |
|
420 | 419 | utils.check_xfail_backends(request.node, backend_alias) |
|
421 | 420 | utils.check_skip_backends(request.node, backend_alias) |
|
422 | 421 | |
|
423 | 422 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
424 | 423 | backend = Backend( |
|
425 | 424 | alias=backend_alias, |
|
426 | 425 | repo_name=repo_name, |
|
427 | 426 | test_name=request.node.name, |
|
428 | 427 | test_repo_container=test_repo) |
|
429 | 428 | request.addfinalizer(backend.cleanup) |
|
430 | 429 | return backend |
|
431 | 430 | |
|
432 | 431 | |
|
433 | 432 | @pytest.fixture |
|
434 | 433 | def backend_git(request, baseapp, test_repo): |
|
435 | 434 | return backend(request, 'git', baseapp, test_repo) |
|
436 | 435 | |
|
437 | 436 | |
|
438 | 437 | @pytest.fixture |
|
439 | 438 | def backend_hg(request, baseapp, test_repo): |
|
440 | 439 | return backend(request, 'hg', baseapp, test_repo) |
|
441 | 440 | |
|
442 | 441 | |
|
443 | 442 | @pytest.fixture |
|
444 | 443 | def backend_svn(request, baseapp, test_repo): |
|
445 | 444 | return backend(request, 'svn', baseapp, test_repo) |
|
446 | 445 | |
|
447 | 446 | |
|
448 | 447 | @pytest.fixture |
|
449 | 448 | def backend_random(backend_git): |
|
450 | 449 | """ |
|
451 | 450 | Use this to express that your tests need "a backend". 
|
452 | 451 | |
|
453 | 452 | A few of our tests need a backend, so that we can run the code. This |
|
454 | 453 | fixture is intended to be used for such cases. It will pick one of the |
|
455 | 454 | backends and run the tests. |
|
456 | 455 | |
|
457 | 456 | The fixture `backend` would run the test multiple times for each |
|
458 | 457 | available backend which is a pure waste of time if the test is |
|
459 | 458 | independent of the backend type. |
|
460 | 459 | """ |
|
461 | 460 | # TODO: johbo: Change this to pick a random backend |
|
462 | 461 | return backend_git |
|
463 | 462 | |
|
464 | 463 | |
|
465 | 464 | @pytest.fixture |
|
466 | 465 | def backend_stub(backend_git): |
|
467 | 466 | """ |
|
468 | 467 | Use this to express that your tests need a backend stub |
|
469 | 468 | |
|
470 | 469 | TODO: mikhail: Implement a real stub logic instead of returning |
|
471 | 470 | a git backend |
|
472 | 471 | """ |
|
473 | 472 | return backend_git |
|
474 | 473 | |
|
475 | 474 | |
|
476 | 475 | @pytest.fixture |
|
477 | 476 | def repo_stub(backend_stub): |
|
478 | 477 | """ |
|
479 | 478 | Use this to express that your tests need a repository stub |
|
480 | 479 | """ |
|
481 | 480 | return backend_stub.create_repo() |
|
482 | 481 | |
|
483 | 482 | |
|
484 | 483 | class Backend(object): |
|
485 | 484 | """ |
|
486 | 485 | Represents the test configuration for one supported backend |
|
487 | 486 | |
|
488 | 487 | Provides easy access to different test repositories based on |
|
489 | 488 | `__getitem__`. Such repositories will only be created once per test |
|
490 | 489 | session. |
|
491 | 490 | """ |
|
492 | 491 | |
|
493 | 492 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
494 | 493 | _master_repo = None |
|
495 | 494 | _commit_ids = {} |
|
496 | 495 | |
|
497 | 496 | def __init__(self, alias, repo_name, test_name, test_repo_container): |
|
498 | 497 | self.alias = alias |
|
499 | 498 | self.repo_name = repo_name |
|
500 | 499 | self._cleanup_repos = [] |
|
501 | 500 | self._test_name = test_name |
|
502 | 501 | self._test_repo_container = test_repo_container |
|
503 | 502 | # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or |
|
504 | 503 | # Fixture will survive in the end. |
|
505 | 504 | self._fixture = Fixture() |
|
506 | 505 | |
|
507 | 506 | def __getitem__(self, key): |
|
508 | 507 | return self._test_repo_container(key, self.alias) |
|
509 | 508 | |
|
510 | 509 | def create_test_repo(self, key, config=None): |
|
511 | 510 | return self._test_repo_container(key, self.alias, config) |
|
512 | 511 | |
|
513 | 512 | @property |
|
514 | 513 | def repo(self): |
|
515 | 514 | """ |
|
516 | 515 | Returns the "current" repository. This is the vcs_test repo or the |
|
517 | 516 | last repo which has been created with `create_repo`. |
|
518 | 517 | """ |
|
519 | 518 | from rhodecode.model.db import Repository |
|
520 | 519 | return Repository.get_by_repo_name(self.repo_name) |
|
521 | 520 | |
|
522 | 521 | @property |
|
523 | 522 | def default_branch_name(self): |
|
524 | 523 | VcsRepository = get_backend(self.alias) |
|
525 | 524 | return VcsRepository.DEFAULT_BRANCH_NAME |
|
526 | 525 | |
|
527 | 526 | @property |
|
528 | 527 | def default_head_id(self): |
|
529 | 528 | """ |
|
530 | 529 | Returns the default head id of the underlying backend. |
|
531 | 530 | |
|
532 | 531 | This will be the default branch name in case the backend does have a |
|
533 | 532 | default branch. In the other cases it will point to a valid head |
|
534 | 533 | which can serve as the base to create a new commit on top of it. |
|
535 | 534 | """ |
|
536 | 535 | vcsrepo = self.repo.scm_instance() |
|
537 | 536 | head_id = ( |
|
538 | 537 | vcsrepo.DEFAULT_BRANCH_NAME or |
|
539 | 538 | vcsrepo.commit_ids[-1]) |
|
540 | 539 | return head_id |
|
541 | 540 | |
|
542 | 541 | @property |
|
543 | 542 | def commit_ids(self): |
|
544 | 543 | """ |
|
545 | 544 | Returns the list of commits for the last created repository |
|
546 | 545 | """ |
|
547 | 546 | return self._commit_ids |
|
548 | 547 | |
|
549 | 548 | def create_master_repo(self, commits): |
|
550 | 549 | """ |
|
551 | 550 | Create a repository and remember it as a template. |
|
552 | 551 | |
|
553 | 552 | This allows to easily create derived repositories to construct |
|
554 | 553 | more complex scenarios for diff, compare and pull requests. |
|
555 | 554 | |
|
556 | 555 | Returns a commit map which maps from commit message to raw_id. |
|
557 | 556 | """ |
|
558 | 557 | self._master_repo = self.create_repo(commits=commits) |
|
559 | 558 | return self._commit_ids |
|
560 | 559 | |
|
561 | 560 | def create_repo( |
|
562 | 561 | self, commits=None, number_of_commits=0, heads=None, |
|
563 | 562 | name_suffix=u'', **kwargs): |
|
564 | 563 | """ |
|
565 | 564 | Create a repository and record it for later cleanup. |
|
566 | 565 | |
|
567 | 566 | :param commits: Optional. A sequence of dict instances. |
|
568 | 567 | Will add a commit per entry to the new repository. |
|
569 | 568 | :param number_of_commits: Optional. If set to a number, this number of |
|
570 | 569 | commits will be added to the new repository. |
|
571 | 570 | :param heads: Optional. Can be set to a sequence of commit 
|
572 | 571 | names which shall be pulled in from the master repository. |
|
573 | 572 | |
|
574 | 573 | """ |
|
575 | 574 | self.repo_name = self._next_repo_name() + name_suffix |
|
576 | 575 | repo = self._fixture.create_repo( |
|
577 | 576 | self.repo_name, repo_type=self.alias, **kwargs) |
|
578 | 577 | self._cleanup_repos.append(repo.repo_name) |
|
579 | 578 | |
|
580 | 579 | commits = commits or [ |
|
581 | 580 | {'message': 'Commit %s of %s' % (x, self.repo_name)} |
|
582 | 581 | for x in xrange(number_of_commits)] |
|
583 | 582 | self._add_commits_to_repo(repo.scm_instance(), commits) |
|
584 | 583 | if heads: |
|
585 | 584 | self.pull_heads(repo, heads) |
|
586 | 585 | |
|
587 | 586 | return repo |
|
588 | 587 | |
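A usage sketch combining create_master_repo and create_repo; the commit messages double as keys in the returned commit map and in `heads`::

    def test_with_derived_repo(backend):
        backend.create_master_repo(commits=[
            {'message': 'base'},
            {'message': 'feature'},
        ])
        repo = backend.create_repo(heads=['base'])   # pulls 'base' from master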
|
589 | 588 | def pull_heads(self, repo, heads): |
|
590 | 589 | """ |
|
591 | 590 | Make sure that repo contains all commits mentioned in `heads` |
|
592 | 591 | """ |
|
593 | 592 | vcsmaster = self._master_repo.scm_instance() |
|
594 | 593 | vcsrepo = repo.scm_instance() |
|
595 | 594 | vcsrepo.config.clear_section('hooks') |
|
596 | 595 | commit_ids = [self._commit_ids[h] for h in heads] |
|
597 | 596 | vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids) |
|
598 | 597 | |
|
599 | 598 | def create_fork(self): |
|
600 | 599 | repo_to_fork = self.repo_name |
|
601 | 600 | self.repo_name = self._next_repo_name() |
|
602 | 601 | repo = self._fixture.create_fork(repo_to_fork, self.repo_name) |
|
603 | 602 | self._cleanup_repos.append(self.repo_name) |
|
604 | 603 | return repo |
|
605 | 604 | |
|
606 | 605 | def new_repo_name(self, suffix=u''): |
|
607 | 606 | self.repo_name = self._next_repo_name() + suffix |
|
608 | 607 | self._cleanup_repos.append(self.repo_name) |
|
609 | 608 | return self.repo_name |
|
610 | 609 | |
|
611 | 610 | def _next_repo_name(self): |
|
612 | 611 | return u"%s_%s" % ( |
|
613 | 612 | self.invalid_repo_name.sub(u'_', self._test_name), |
|
614 | 613 | len(self._cleanup_repos)) |
|
615 | 614 | |
|
616 | 615 | def ensure_file(self, filename, content='Test content\n'): |
|
617 | 616 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
618 | 617 | commits = [ |
|
619 | 618 | {'added': [ |
|
620 | 619 | FileNode(filename, content=content), |
|
621 | 620 | ]}, |
|
622 | 621 | ] |
|
623 | 622 | self._add_commits_to_repo(self.repo.scm_instance(), commits) |
|
624 | 623 | |
|
625 | 624 | def enable_downloads(self): |
|
626 | 625 | repo = self.repo |
|
627 | 626 | repo.enable_downloads = True |
|
628 | 627 | Session().add(repo) |
|
629 | 628 | Session().commit() |
|
630 | 629 | |
|
631 | 630 | def cleanup(self): |
|
632 | 631 | for repo_name in reversed(self._cleanup_repos): |
|
633 | 632 | self._fixture.destroy_repo(repo_name) |
|
634 | 633 | |
|
635 | 634 | def _add_commits_to_repo(self, repo, commits): |
|
636 | 635 | commit_ids = _add_commits_to_repo(repo, commits) |
|
637 | 636 | if not commit_ids: |
|
638 | 637 | return |
|
639 | 638 | self._commit_ids = commit_ids |
|
640 | 639 | |
|
641 | 640 | # Creating refs for Git to allow fetching them from remote repository |
|
642 | 641 | if self.alias == 'git': |
|
643 | 642 | refs = {} |
|
644 | 643 | for message in self._commit_ids: |
|
645 | 644 | # TODO: mikhail: do more special chars replacements |
|
646 | 645 | ref_name = 'refs/test-refs/{}'.format( |
|
647 | 646 | message.replace(' ', '')) |
|
648 | 647 | refs[ref_name] = self._commit_ids[message] |
|
649 | 648 | self._create_refs(repo, refs) |
|
650 | 649 | |
|
651 | 650 | def _create_refs(self, repo, refs): |
|
652 | 651 | for ref_name in refs: |
|
653 | 652 | repo.set_refs(ref_name, refs[ref_name]) |
|
654 | 653 | |
|
655 | 654 | |
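A hedged usage sketch of the master-repo workflow documented in `create_repo` above: commits are created once via `create_master_repo` and then pulled into per-test repositories by commit message (the test name and assertion are illustrative, not taken from the suite)::

    def test_target_repo_receives_head(backend):
        commit_ids = backend.create_master_repo(
            [{'message': 'c1'}, {'message': 'c2'}])
        # a fresh repository containing only the history up to 'c1'
        repo = backend.create_repo(heads=['c1'])
        assert commit_ids['c1'] in repo.scm_instance().commit_ids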
|
656 | 655 | @pytest.fixture |
|
657 | 656 | def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo): |
|
658 | 657 | """ |
|
659 | 658 | Parametrized fixture which represents a single vcs backend implementation. |
|
660 | 659 | |
|
661 | 660 | See the fixture `backend` for more details. This one implements the same |
|
662 | 661 | concept, but on the vcs level, so it does not provide model instances etc. |
|
663 | 662 | |
|
664 | 663 | Parameters are generated dynamically, see :func:`pytest_generate_tests` |
|
665 | 664 | for how this works. |
|
666 | 665 | """ |
|
667 | 666 | if backend_alias not in request.config.getoption('--backends'): |
|
668 | 667 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
669 | 668 | |
|
670 | 669 | utils.check_xfail_backends(request.node, backend_alias) |
|
671 | 670 | utils.check_skip_backends(request.node, backend_alias) |
|
672 | 671 | |
|
673 | 672 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
674 | 673 | repo_path = os.path.join(tests_tmp_path, repo_name) |
|
675 | 674 | backend = VcsBackend( |
|
676 | 675 | alias=backend_alias, |
|
677 | 676 | repo_path=repo_path, |
|
678 | 677 | test_name=request.node.name, |
|
679 | 678 | test_repo_container=test_repo) |
|
680 | 679 | request.addfinalizer(backend.cleanup) |
|
681 | 680 | return backend |
|
682 | 681 | |
|
683 | 682 | |
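A minimal sketch of a test consuming `vcsbackend`; it runs once per selected backend, and the created repository is removed by the finalizer (test name and assertion are illustrative)::

    def test_synthetic_commits_present(vcsbackend):
        # builds a throwaway repository with three generated commits
        repo = vcsbackend.create_repo(number_of_commits=3)
        assert len(repo.commit_ids) == 3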
|
684 | 683 | @pytest.fixture |
|
685 | 684 | def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo): |
|
686 | 685 | return vcsbackend(request, 'git', tests_tmp_path, baseapp, test_repo) |
|
687 | 686 | |
|
688 | 687 | |
|
689 | 688 | @pytest.fixture |
|
690 | 689 | def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo): |
|
691 | 690 | return vcsbackend(request, 'hg', tests_tmp_path, baseapp, test_repo) |
|
692 | 691 | |
|
693 | 692 | |
|
694 | 693 | @pytest.fixture |
|
695 | 694 | def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo): |
|
696 | 695 | return vcsbackend(request, 'svn', tests_tmp_path, baseapp, test_repo) |
|
697 | 696 | |
|
698 | 697 | |
|
699 | 698 | @pytest.fixture |
|
700 | 699 | def vcsbackend_random(vcsbackend_git): |
|
701 | 700 | """ |
|
702 | 701 | Use this to express that your tests need "a vcsbackend". |
|
703 | 702 | |
|
704 | 703 | The fixture `vcsbackend` would run the test multiple times for each |
|
705 | 704 | available vcs backend, which is a pure waste of time if the test is |
|
706 | 705 | independent of the vcs backend type. |
|
707 | 706 | """ |
|
708 | 707 | # TODO: johbo: Change this to pick a random backend |
|
709 | 708 | return vcsbackend_git |
|
710 | 709 | |
|
711 | 710 | |
|
712 | 711 | @pytest.fixture |
|
713 | 712 | def vcsbackend_stub(vcsbackend_git): |
|
714 | 713 | """ |
|
715 | 714 | Use this to express that your test just needs a stub of a vcsbackend. |
|
716 | 715 | |
|
717 | 716 | Plan is to eventually implement an in-memory stub to speed tests up. |
|
718 | 717 | """ |
|
719 | 718 | return vcsbackend_git |
|
720 | 719 | |
|
721 | 720 | |
|
722 | 721 | class VcsBackend(object): |
|
723 | 722 | """ |
|
724 | 723 | Represents the test configuration for one supported vcs backend. |
|
725 | 724 | """ |
|
726 | 725 | |
|
727 | 726 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
728 | 727 | |
|
729 | 728 | def __init__(self, alias, repo_path, test_name, test_repo_container): |
|
730 | 729 | self.alias = alias |
|
731 | 730 | self._repo_path = repo_path |
|
732 | 731 | self._cleanup_repos = [] |
|
733 | 732 | self._test_name = test_name |
|
734 | 733 | self._test_repo_container = test_repo_container |
|
735 | 734 | |
|
736 | 735 | def __getitem__(self, key): |
|
737 | 736 | return self._test_repo_container(key, self.alias).scm_instance() |
|
738 | 737 | |
|
739 | 738 | @property |
|
740 | 739 | def repo(self): |
|
741 | 740 | """ |
|
742 | 741 | Returns the "current" repository. This is the vcs_test repo of the last |
|
743 | 742 | repo which has been created. |
|
744 | 743 | """ |
|
745 | 744 | Repository = get_backend(self.alias) |
|
746 | 745 | return Repository(self._repo_path) |
|
747 | 746 | |
|
748 | 747 | @property |
|
749 | 748 | def backend(self): |
|
750 | 749 | """ |
|
751 | 750 | Returns the backend implementation class. |
|
752 | 751 | """ |
|
753 | 752 | return get_backend(self.alias) |
|
754 | 753 | |
|
755 | 754 | def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None): |
|
756 | 755 | repo_name = self._next_repo_name() |
|
757 | 756 | self._repo_path = get_new_dir(repo_name) |
|
758 | 757 | repo_class = get_backend(self.alias) |
|
759 | 758 | src_url = None |
|
760 | 759 | if _clone_repo: |
|
761 | 760 | src_url = _clone_repo.path |
|
762 | 761 | repo = repo_class(self._repo_path, create=True, src_url=src_url) |
|
763 | 762 | self._cleanup_repos.append(repo) |
|
764 | 763 | |
|
765 | 764 | commits = commits or [ |
|
766 | 765 | {'message': 'Commit %s of %s' % (x, repo_name)} |
|
767 | 766 | for x in xrange(number_of_commits)] |
|
768 | 767 | _add_commits_to_repo(repo, commits) |
|
769 | 768 | return repo |
|
770 | 769 | |
|
771 | 770 | def clone_repo(self, repo): |
|
772 | 771 | return self.create_repo(_clone_repo=repo) |
|
773 | 772 | |
|
774 | 773 | def cleanup(self): |
|
775 | 774 | for repo in self._cleanup_repos: |
|
776 | 775 | shutil.rmtree(repo.path) |
|
777 | 776 | |
|
778 | 777 | def new_repo_path(self): |
|
779 | 778 | repo_name = self._next_repo_name() |
|
780 | 779 | self._repo_path = get_new_dir(repo_name) |
|
781 | 780 | return self._repo_path |
|
782 | 781 | |
|
783 | 782 | def _next_repo_name(self): |
|
784 | 783 | return "%s_%s" % ( |
|
785 | 784 | self.invalid_repo_name.sub('_', self._test_name), |
|
786 | 785 | len(self._cleanup_repos)) |
|
787 | 786 | |
|
788 | 787 | def add_file(self, repo, filename, content='Test content\n'): |
|
789 | 788 | imc = repo.in_memory_commit |
|
790 | 789 | imc.add(FileNode(filename, content=content)) |
|
791 | 790 | imc.commit( |
|
792 | 791 | message=u'Automatic commit from vcsbackend fixture', |
|
793 | 792 | author=u'Automatic') |
|
794 | 793 | |
|
795 | 794 | def ensure_file(self, filename, content='Test content\n'): |
|
796 | 795 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
797 | 796 | self.add_file(self.repo, filename, content) |
|
798 | 797 | |
|
799 | 798 | |
|
800 | 799 | def _add_commits_to_repo(vcs_repo, commits): |
|
801 | 800 | commit_ids = {} |
|
802 | 801 | if not commits: |
|
803 | 802 | return commit_ids |
|
804 | 803 | |
|
805 | 804 | imc = vcs_repo.in_memory_commit |
|
806 | 805 | commit = None |
|
807 | 806 | |
|
808 | 807 | for idx, commit in enumerate(commits): |
|
809 | 808 | message = unicode(commit.get('message', 'Commit %s' % idx)) |
|
810 | 809 | |
|
811 | 810 | for node in commit.get('added', []): |
|
812 | 811 | imc.add(FileNode(node.path, content=node.content)) |
|
813 | 812 | for node in commit.get('changed', []): |
|
814 | 813 | imc.change(FileNode(node.path, content=node.content)) |
|
815 | 814 | for node in commit.get('removed', []): |
|
816 | 815 | imc.remove(FileNode(node.path)) |
|
817 | 816 | |
|
818 | 817 | parents = [ |
|
819 | 818 | vcs_repo.get_commit(commit_id=commit_ids[p]) |
|
820 | 819 | for p in commit.get('parents', [])] |
|
821 | 820 | |
|
822 | 821 | operations = ('added', 'changed', 'removed') |
|
823 | 822 | if not any((commit.get(o) for o in operations)): |
|
824 | 823 | imc.add(FileNode('file_%s' % idx, content=message)) |
|
825 | 824 | |
|
826 | 825 | commit = imc.commit( |
|
827 | 826 | message=message, |
|
828 | 827 | author=unicode(commit.get('author', 'Automatic')), |
|
829 | 828 | date=commit.get('date'), |
|
830 | 829 | branch=commit.get('branch'), |
|
831 | 830 | parents=parents) |
|
832 | 831 | |
|
833 | 832 | commit_ids[commit.message] = commit.raw_id |
|
834 | 833 | |
|
835 | 834 | return commit_ids |
|
836 | 835 | |
|
837 | 836 | |
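The `commits` argument consumed by `_add_commits_to_repo` (and by the `create_repo` helpers above) is a list of plain dicts. A hedged sketch of the recognized keys, inferred from the loop above::

    commits = [
        # 'message' defaults to 'Commit <idx>' when omitted
        {'message': 'add readme',
         'added': [FileNode('README', content='hello\n')]},
        # later entries may change or remove nodes and can reference
        # earlier commits by message through 'parents'
        {'message': 'update readme',
         'changed': [FileNode('README', content='hello world\n')],
         'parents': ['add readme']},
    ]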
|
838 | 837 | @pytest.fixture |
|
839 | 838 | def reposerver(request): |
|
840 | 839 | """ |
|
841 | 840 | Allows serving a backend repository |
|
842 | 841 | """ |
|
843 | 842 | |
|
844 | 843 | repo_server = RepoServer() |
|
845 | 844 | request.addfinalizer(repo_server.cleanup) |
|
846 | 845 | return repo_server |
|
847 | 846 | |
|
848 | 847 | |
|
849 | 848 | class RepoServer(object): |
|
850 | 849 | """ |
|
851 | 850 | Utility to serve a local repository for the duration of a test case. |
|
852 | 851 | |
|
853 | 852 | Supports only Subversion so far. |
|
854 | 853 | """ |
|
855 | 854 | |
|
856 | 855 | url = None |
|
857 | 856 | |
|
858 | 857 | def __init__(self): |
|
859 | 858 | self._cleanup_servers = [] |
|
860 | 859 | |
|
861 | 860 | def serve(self, vcsrepo): |
|
862 | 861 | if vcsrepo.alias != 'svn': |
|
863 | 862 | raise TypeError("Backend %s not supported" % vcsrepo.alias) |
|
864 | 863 | |
|
865 | 864 | proc = subprocess32.Popen( |
|
866 | 865 | ['svnserve', '-d', '--foreground', '--listen-host', 'localhost', |
|
867 | 866 | '--root', vcsrepo.path]) |
|
868 | 867 | self._cleanup_servers.append(proc) |
|
869 | 868 | self.url = 'svn://localhost' |
|
870 | 869 | |
|
871 | 870 | def cleanup(self): |
|
872 | 871 | for proc in self._cleanup_servers: |
|
873 | 872 | proc.terminate() |
|
874 | 873 | |
|
875 | 874 | |
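A hedged sketch of `reposerver` combined with the Subversion backend (the assertion mirrors the hardcoded url above)::

    def test_serve_svn_repo(reposerver, vcsbackend_svn):
        repo = vcsbackend_svn.create_repo(number_of_commits=1)
        # spawns svnserve for the repository; terminated in cleanup()
        reposerver.serve(repo)
        assert reposerver.url == 'svn://localhost'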
|
876 | 875 | @pytest.fixture |
|
877 | 876 | def pr_util(backend, request, config_stub): |
|
878 | 877 | """ |
|
879 | 878 | Utility for tests of models and for functional tests around pull requests. |
|
880 | 879 | |
|
881 | 880 | It gives an instance of :class:`PRTestUtility` which provides various |
|
882 | 881 | utility methods around one pull request. |
|
883 | 882 | |
|
884 | 883 | This fixture uses `backend` and inherits its parameterization. |
|
885 | 884 | """ |
|
886 | 885 | |
|
887 | 886 | util = PRTestUtility(backend) |
|
888 | 887 | request.addfinalizer(util.cleanup) |
|
889 | 888 | |
|
890 | 889 | return util |
|
891 | 890 | |
|
892 | 891 | |
|
893 | 892 | class PRTestUtility(object): |
|
894 | 893 | |
|
895 | 894 | pull_request = None |
|
896 | 895 | pull_request_id = None |
|
897 | 896 | mergeable_patcher = None |
|
898 | 897 | mergeable_mock = None |
|
899 | 898 | notification_patcher = None |
|
900 | 899 | |
|
901 | 900 | def __init__(self, backend): |
|
902 | 901 | self.backend = backend |
|
903 | 902 | |
|
904 | 903 | def create_pull_request( |
|
905 | 904 | self, commits=None, target_head=None, source_head=None, |
|
906 | 905 | revisions=None, approved=False, author=None, mergeable=False, |
|
907 | 906 | enable_notifications=True, name_suffix=u'', reviewers=None, |
|
908 | 907 | title=u"Test", description=u"Description"): |
|
909 | 908 | self.set_mergeable(mergeable) |
|
910 | 909 | if not enable_notifications: |
|
911 | 910 | # mock notification side effect |
|
912 | 911 | self.notification_patcher = mock.patch( |
|
913 | 912 | 'rhodecode.model.notification.NotificationModel.create') |
|
914 | 913 | self.notification_patcher.start() |
|
915 | 914 | |
|
916 | 915 | if not self.pull_request: |
|
917 | 916 | if not commits: |
|
918 | 917 | commits = [ |
|
919 | 918 | {'message': 'c1'}, |
|
920 | 919 | {'message': 'c2'}, |
|
921 | 920 | {'message': 'c3'}, |
|
922 | 921 | ] |
|
923 | 922 | target_head = 'c1' |
|
924 | 923 | source_head = 'c2' |
|
925 | 924 | revisions = ['c2'] |
|
926 | 925 | |
|
927 | 926 | self.commit_ids = self.backend.create_master_repo(commits) |
|
928 | 927 | self.target_repository = self.backend.create_repo( |
|
929 | 928 | heads=[target_head], name_suffix=name_suffix) |
|
930 | 929 | self.source_repository = self.backend.create_repo( |
|
931 | 930 | heads=[source_head], name_suffix=name_suffix) |
|
932 | 931 | self.author = author or UserModel().get_by_username( |
|
933 | 932 | TEST_USER_ADMIN_LOGIN) |
|
934 | 933 | |
|
935 | 934 | model = PullRequestModel() |
|
936 | 935 | self.create_parameters = { |
|
937 | 936 | 'created_by': self.author, |
|
938 | 937 | 'source_repo': self.source_repository.repo_name, |
|
939 | 938 | 'source_ref': self._default_branch_reference(source_head), |
|
940 | 939 | 'target_repo': self.target_repository.repo_name, |
|
941 | 940 | 'target_ref': self._default_branch_reference(target_head), |
|
942 | 941 | 'revisions': [self.commit_ids[r] for r in revisions], |
|
943 | 942 | 'reviewers': reviewers or self._get_reviewers(), |
|
944 | 943 | 'title': title, |
|
945 | 944 | 'description': description, |
|
946 | 945 | } |
|
947 | 946 | self.pull_request = model.create(**self.create_parameters) |
|
948 | 947 | assert model.get_versions(self.pull_request) == [] |
|
949 | 948 | |
|
950 | 949 | self.pull_request_id = self.pull_request.pull_request_id |
|
951 | 950 | |
|
952 | 951 | if approved: |
|
953 | 952 | self.approve() |
|
954 | 953 | |
|
955 | 954 | Session().add(self.pull_request) |
|
956 | 955 | Session().commit() |
|
957 | 956 | |
|
958 | 957 | return self.pull_request |
|
959 | 958 | |
|
960 | 959 | def approve(self): |
|
961 | 960 | self.create_status_votes( |
|
962 | 961 | ChangesetStatus.STATUS_APPROVED, |
|
963 | 962 | *self.pull_request.reviewers) |
|
964 | 963 | |
|
965 | 964 | def close(self): |
|
966 | 965 | PullRequestModel().close_pull_request(self.pull_request, self.author) |
|
967 | 966 | |
|
968 | 967 | def _default_branch_reference(self, commit_message): |
|
969 | 968 | reference = '%s:%s:%s' % ( |
|
970 | 969 | 'branch', |
|
971 | 970 | self.backend.default_branch_name, |
|
972 | 971 | self.commit_ids[commit_message]) |
|
973 | 972 | return reference |
|
974 | 973 | |
|
975 | 974 | def _get_reviewers(self): |
|
976 | 975 | return [ |
|
977 | 976 | (TEST_USER_REGULAR_LOGIN, ['default1'], False), |
|
978 | 977 | (TEST_USER_REGULAR2_LOGIN, ['default2'], False), |
|
979 | 978 | ] |
|
980 | 979 | |
|
981 | 980 | def update_source_repository(self, head=None): |
|
982 | 981 | heads = [head or 'c3'] |
|
983 | 982 | self.backend.pull_heads(self.source_repository, heads=heads) |
|
984 | 983 | |
|
985 | 984 | def add_one_commit(self, head=None): |
|
986 | 985 | self.update_source_repository(head=head) |
|
987 | 986 | old_commit_ids = set(self.pull_request.revisions) |
|
988 | 987 | PullRequestModel().update_commits(self.pull_request) |
|
989 | 988 | commit_ids = set(self.pull_request.revisions) |
|
990 | 989 | new_commit_ids = commit_ids - old_commit_ids |
|
991 | 990 | assert len(new_commit_ids) == 1 |
|
992 | 991 | return new_commit_ids.pop() |
|
993 | 992 | |
|
994 | 993 | def remove_one_commit(self): |
|
995 | 994 | assert len(self.pull_request.revisions) == 2 |
|
996 | 995 | source_vcs = self.source_repository.scm_instance() |
|
997 | 996 | removed_commit_id = source_vcs.commit_ids[-1] |
|
998 | 997 | |
|
999 | 998 | # TODO: johbo: Git and Mercurial have an inconsistent vcs api here, |
|
1000 | 999 | # remove the if once that's sorted out. |
|
1001 | 1000 | if self.backend.alias == "git": |
|
1002 | 1001 | kwargs = {'branch_name': self.backend.default_branch_name} |
|
1003 | 1002 | else: |
|
1004 | 1003 | kwargs = {} |
|
1005 | 1004 | source_vcs.strip(removed_commit_id, **kwargs) |
|
1006 | 1005 | |
|
1007 | 1006 | PullRequestModel().update_commits(self.pull_request) |
|
1008 | 1007 | assert len(self.pull_request.revisions) == 1 |
|
1009 | 1008 | return removed_commit_id |
|
1010 | 1009 | |
|
1011 | 1010 | def create_comment(self, linked_to=None): |
|
1012 | 1011 | comment = CommentsModel().create( |
|
1013 | 1012 | text=u"Test comment", |
|
1014 | 1013 | repo=self.target_repository.repo_name, |
|
1015 | 1014 | user=self.author, |
|
1016 | 1015 | pull_request=self.pull_request) |
|
1017 | 1016 | assert comment.pull_request_version_id is None |
|
1018 | 1017 | |
|
1019 | 1018 | if linked_to: |
|
1020 | 1019 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1021 | 1020 | |
|
1022 | 1021 | return comment |
|
1023 | 1022 | |
|
1024 | 1023 | def create_inline_comment( |
|
1025 | 1024 | self, linked_to=None, line_no=u'n1', file_path='file_1'): |
|
1026 | 1025 | comment = CommentsModel().create( |
|
1027 | 1026 | text=u"Test comment", |
|
1028 | 1027 | repo=self.target_repository.repo_name, |
|
1029 | 1028 | user=self.author, |
|
1030 | 1029 | line_no=line_no, |
|
1031 | 1030 | f_path=file_path, |
|
1032 | 1031 | pull_request=self.pull_request) |
|
1033 | 1032 | assert comment.pull_request_version_id is None |
|
1034 | 1033 | |
|
1035 | 1034 | if linked_to: |
|
1036 | 1035 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1037 | 1036 | |
|
1038 | 1037 | return comment |
|
1039 | 1038 | |
|
1040 | 1039 | def create_version_of_pull_request(self): |
|
1041 | 1040 | pull_request = self.create_pull_request() |
|
1042 | 1041 | version = PullRequestModel()._create_version_from_snapshot( |
|
1043 | 1042 | pull_request) |
|
1044 | 1043 | return version |
|
1045 | 1044 | |
|
1046 | 1045 | def create_status_votes(self, status, *reviewers): |
|
1047 | 1046 | for reviewer in reviewers: |
|
1048 | 1047 | ChangesetStatusModel().set_status( |
|
1049 | 1048 | repo=self.pull_request.target_repo, |
|
1050 | 1049 | status=status, |
|
1051 | 1050 | user=reviewer.user_id, |
|
1052 | 1051 | pull_request=self.pull_request) |
|
1053 | 1052 | |
|
1054 | 1053 | def set_mergeable(self, value): |
|
1055 | 1054 | if not self.mergeable_patcher: |
|
1056 | 1055 | self.mergeable_patcher = mock.patch.object( |
|
1057 | 1056 | VcsSettingsModel, 'get_general_settings') |
|
1058 | 1057 | self.mergeable_mock = self.mergeable_patcher.start() |
|
1059 | 1058 | self.mergeable_mock.return_value = { |
|
1060 | 1059 | 'rhodecode_pr_merge_enabled': value} |
|
1061 | 1060 | |
|
1062 | 1061 | def cleanup(self): |
|
1063 | 1062 | # In case the source repository is already cleaned up, the pull |
|
1064 | 1063 | # request will already be deleted. |
|
1065 | 1064 | pull_request = PullRequest().get(self.pull_request_id) |
|
1066 | 1065 | if pull_request: |
|
1067 | 1066 | PullRequestModel().delete(pull_request, pull_request.author) |
|
1068 | 1067 | Session().commit() |
|
1069 | 1068 | |
|
1070 | 1069 | if self.notification_patcher: |
|
1071 | 1070 | self.notification_patcher.stop() |
|
1072 | 1071 | |
|
1073 | 1072 | if self.mergeable_patcher: |
|
1074 | 1073 | self.mergeable_patcher.stop() |
|
1075 | 1074 | |
|
1076 | 1075 | |
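A hedged sketch of the typical `pr_util` flow (test name is illustrative): the default pull request spans the generated commits c1..c3, and `add_one_commit` pulls c3 into the source repository before re-evaluating the pull request::

    def test_update_adds_revision(pr_util):
        pull_request = pr_util.create_pull_request(mergeable=True)
        new_commit_id = pr_util.add_one_commit()
        assert new_commit_id in pull_request.revisions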
|
1077 | 1076 | @pytest.fixture |
|
1078 | 1077 | def user_admin(baseapp): |
|
1079 | 1078 | """ |
|
1080 | 1079 | Provides the default admin test user as an instance of `db.User`. |
|
1081 | 1080 | """ |
|
1082 | 1081 | user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1083 | 1082 | return user |
|
1084 | 1083 | |
|
1085 | 1084 | |
|
1086 | 1085 | @pytest.fixture |
|
1087 | 1086 | def user_regular(baseapp): |
|
1088 | 1087 | """ |
|
1089 | 1088 | Provides the default regular test user as an instance of `db.User`. |
|
1090 | 1089 | """ |
|
1091 | 1090 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) |
|
1092 | 1091 | return user |
|
1093 | 1092 | |
|
1094 | 1093 | |
|
1095 | 1094 | @pytest.fixture |
|
1096 | 1095 | def user_util(request, baseapp): |
|
1097 | 1096 | """ |
|
1098 | 1097 | Provides a wired instance of `UserUtility` with integrated cleanup. |
|
1099 | 1098 | """ |
|
1100 | 1099 | utility = UserUtility(test_name=request.node.name) |
|
1101 | 1100 | request.addfinalizer(utility.cleanup) |
|
1102 | 1101 | return utility |
|
1103 | 1102 | |
|
1104 | 1103 | |
|
1105 | 1104 | # TODO: johbo: Split this up into utilities per domain or something similar |
|
1106 | 1105 | class UserUtility(object): |
|
1107 | 1106 | |
|
1108 | 1107 | def __init__(self, test_name="test"): |
|
1109 | 1108 | self._test_name = self._sanitize_name(test_name) |
|
1110 | 1109 | self.fixture = Fixture() |
|
1111 | 1110 | self.repo_group_ids = [] |
|
1112 | 1111 | self.repos_ids = [] |
|
1113 | 1112 | self.user_ids = [] |
|
1114 | 1113 | self.user_group_ids = [] |
|
1115 | 1114 | self.user_repo_permission_ids = [] |
|
1116 | 1115 | self.user_group_repo_permission_ids = [] |
|
1117 | 1116 | self.user_repo_group_permission_ids = [] |
|
1118 | 1117 | self.user_group_repo_group_permission_ids = [] |
|
1119 | 1118 | self.user_user_group_permission_ids = [] |
|
1120 | 1119 | self.user_group_user_group_permission_ids = [] |
|
1121 | 1120 | self.user_permissions = [] |
|
1122 | 1121 | |
|
1123 | 1122 | def _sanitize_name(self, name): |
|
1124 | 1123 | for char in ['[', ']']: |
|
1125 | 1124 | name = name.replace(char, '_') |
|
1126 | 1125 | return name |
|
1127 | 1126 | |
|
1128 | 1127 | def create_repo_group( |
|
1129 | 1128 | self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True): |
|
1130 | 1129 | group_name = "{prefix}_repogroup_{count}".format( |
|
1131 | 1130 | prefix=self._test_name, |
|
1132 | 1131 | count=len(self.repo_group_ids)) |
|
1133 | 1132 | repo_group = self.fixture.create_repo_group( |
|
1134 | 1133 | group_name, cur_user=owner) |
|
1135 | 1134 | if auto_cleanup: |
|
1136 | 1135 | self.repo_group_ids.append(repo_group.group_id) |
|
1137 | 1136 | return repo_group |
|
1138 | 1137 | |
|
1139 | 1138 | def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, |
|
1140 | 1139 | auto_cleanup=True, repo_type='hg'): |
|
1141 | 1140 | repo_name = "{prefix}_repository_{count}".format( |
|
1142 | 1141 | prefix=self._test_name, |
|
1143 | 1142 | count=len(self.repos_ids)) |
|
1144 | 1143 | |
|
1145 | 1144 | repository = self.fixture.create_repo( |
|
1146 | 1145 | repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type) |
|
1147 | 1146 | if auto_cleanup: |
|
1148 | 1147 | self.repos_ids.append(repository.repo_id) |
|
1149 | 1148 | return repository |
|
1150 | 1149 | |
|
1151 | 1150 | def create_user(self, auto_cleanup=True, **kwargs): |
|
1152 | 1151 | user_name = "{prefix}_user_{count}".format( |
|
1153 | 1152 | prefix=self._test_name, |
|
1154 | 1153 | count=len(self.user_ids)) |
|
1155 | 1154 | user = self.fixture.create_user(user_name, **kwargs) |
|
1156 | 1155 | if auto_cleanup: |
|
1157 | 1156 | self.user_ids.append(user.user_id) |
|
1158 | 1157 | return user |
|
1159 | 1158 | |
|
1160 | 1159 | def create_user_with_group(self): |
|
1161 | 1160 | user = self.create_user() |
|
1162 | 1161 | user_group = self.create_user_group(members=[user]) |
|
1163 | 1162 | return user, user_group |
|
1164 | 1163 | |
|
1165 | 1164 | def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None, |
|
1166 | 1165 | auto_cleanup=True, **kwargs): |
|
1167 | 1166 | group_name = "{prefix}_usergroup_{count}".format( |
|
1168 | 1167 | prefix=self._test_name, |
|
1169 | 1168 | count=len(self.user_group_ids)) |
|
1170 | 1169 | user_group = self.fixture.create_user_group( |
|
1171 | 1170 | group_name, cur_user=owner, **kwargs) |
|
1172 | 1171 | |
|
1173 | 1172 | if auto_cleanup: |
|
1174 | 1173 | self.user_group_ids.append(user_group.users_group_id) |
|
1175 | 1174 | if members: |
|
1176 | 1175 | for user in members: |
|
1177 | 1176 | UserGroupModel().add_user_to_group(user_group, user) |
|
1178 | 1177 | return user_group |
|
1179 | 1178 | |
|
1180 | 1179 | def grant_user_permission(self, user_name, permission_name): |
|
1181 | 1180 | self._inherit_default_user_permissions(user_name, False) |
|
1182 | 1181 | self.user_permissions.append((user_name, permission_name)) |
|
1183 | 1182 | |
|
1184 | 1183 | def grant_user_permission_to_repo_group( |
|
1185 | 1184 | self, repo_group, user, permission_name): |
|
1186 | 1185 | permission = RepoGroupModel().grant_user_permission( |
|
1187 | 1186 | repo_group, user, permission_name) |
|
1188 | 1187 | self.user_repo_group_permission_ids.append( |
|
1189 | 1188 | (repo_group.group_id, user.user_id)) |
|
1190 | 1189 | return permission |
|
1191 | 1190 | |
|
1192 | 1191 | def grant_user_group_permission_to_repo_group( |
|
1193 | 1192 | self, repo_group, user_group, permission_name): |
|
1194 | 1193 | permission = RepoGroupModel().grant_user_group_permission( |
|
1195 | 1194 | repo_group, user_group, permission_name) |
|
1196 | 1195 | self.user_group_repo_group_permission_ids.append( |
|
1197 | 1196 | (repo_group.group_id, user_group.users_group_id)) |
|
1198 | 1197 | return permission |
|
1199 | 1198 | |
|
1200 | 1199 | def grant_user_permission_to_repo( |
|
1201 | 1200 | self, repo, user, permission_name): |
|
1202 | 1201 | permission = RepoModel().grant_user_permission( |
|
1203 | 1202 | repo, user, permission_name) |
|
1204 | 1203 | self.user_repo_permission_ids.append( |
|
1205 | 1204 | (repo.repo_id, user.user_id)) |
|
1206 | 1205 | return permission |
|
1207 | 1206 | |
|
1208 | 1207 | def grant_user_group_permission_to_repo( |
|
1209 | 1208 | self, repo, user_group, permission_name): |
|
1210 | 1209 | permission = RepoModel().grant_user_group_permission( |
|
1211 | 1210 | repo, user_group, permission_name) |
|
1212 | 1211 | self.user_group_repo_permission_ids.append( |
|
1213 | 1212 | (repo.repo_id, user_group.users_group_id)) |
|
1214 | 1213 | return permission |
|
1215 | 1214 | |
|
1216 | 1215 | def grant_user_permission_to_user_group( |
|
1217 | 1216 | self, target_user_group, user, permission_name): |
|
1218 | 1217 | permission = UserGroupModel().grant_user_permission( |
|
1219 | 1218 | target_user_group, user, permission_name) |
|
1220 | 1219 | self.user_user_group_permission_ids.append( |
|
1221 | 1220 | (target_user_group.users_group_id, user.user_id)) |
|
1222 | 1221 | return permission |
|
1223 | 1222 | |
|
1224 | 1223 | def grant_user_group_permission_to_user_group( |
|
1225 | 1224 | self, target_user_group, user_group, permission_name): |
|
1226 | 1225 | permission = UserGroupModel().grant_user_group_permission( |
|
1227 | 1226 | target_user_group, user_group, permission_name) |
|
1228 | 1227 | self.user_group_user_group_permission_ids.append( |
|
1229 | 1228 | (target_user_group.users_group_id, user_group.users_group_id)) |
|
1230 | 1229 | return permission |
|
1231 | 1230 | |
|
1232 | 1231 | def revoke_user_permission(self, user_name, permission_name): |
|
1233 | 1232 | self._inherit_default_user_permissions(user_name, True) |
|
1234 | 1233 | UserModel().revoke_perm(user_name, permission_name) |
|
1235 | 1234 | |
|
1236 | 1235 | def _inherit_default_user_permissions(self, user_name, value): |
|
1237 | 1236 | user = UserModel().get_by_username(user_name) |
|
1238 | 1237 | user.inherit_default_permissions = value |
|
1239 | 1238 | Session().add(user) |
|
1240 | 1239 | Session().commit() |
|
1241 | 1240 | |
|
1242 | 1241 | def cleanup(self): |
|
1243 | 1242 | self._cleanup_permissions() |
|
1244 | 1243 | self._cleanup_repos() |
|
1245 | 1244 | self._cleanup_repo_groups() |
|
1246 | 1245 | self._cleanup_user_groups() |
|
1247 | 1246 | self._cleanup_users() |
|
1248 | 1247 | |
|
1249 | 1248 | def _cleanup_permissions(self): |
|
1250 | 1249 | if self.user_permissions: |
|
1251 | 1250 | for user_name, permission_name in self.user_permissions: |
|
1252 | 1251 | self.revoke_user_permission(user_name, permission_name) |
|
1253 | 1252 | |
|
1254 | 1253 | for permission in self.user_repo_permission_ids: |
|
1255 | 1254 | RepoModel().revoke_user_permission(*permission) |
|
1256 | 1255 | |
|
1257 | 1256 | for permission in self.user_group_repo_permission_ids: |
|
1258 | 1257 | RepoModel().revoke_user_group_permission(*permission) |
|
1259 | 1258 | |
|
1260 | 1259 | for permission in self.user_repo_group_permission_ids: |
|
1261 | 1260 | RepoGroupModel().revoke_user_permission(*permission) |
|
1262 | 1261 | |
|
1263 | 1262 | for permission in self.user_group_repo_group_permission_ids: |
|
1264 | 1263 | RepoGroupModel().revoke_user_group_permission(*permission) |
|
1265 | 1264 | |
|
1266 | 1265 | for permission in self.user_user_group_permission_ids: |
|
1267 | 1266 | UserGroupModel().revoke_user_permission(*permission) |
|
1268 | 1267 | |
|
1269 | 1268 | for permission in self.user_group_user_group_permission_ids: |
|
1270 | 1269 | UserGroupModel().revoke_user_group_permission(*permission) |
|
1271 | 1270 | |
|
1272 | 1271 | def _cleanup_repo_groups(self): |
|
1273 | 1272 | def _repo_group_compare(first_group_id, second_group_id): |
|
1274 | 1273 | """ |
|
1275 | 1274 | Gives higher priority to the groups with the most complex paths |
|
1276 | 1275 | """ |
|
1277 | 1276 | first_group = RepoGroup.get(first_group_id) |
|
1278 | 1277 | second_group = RepoGroup.get(second_group_id) |
|
1279 | 1278 | first_group_parts = ( |
|
1280 | 1279 | len(first_group.group_name.split('/')) if first_group else 0) |
|
1281 | 1280 | second_group_parts = ( |
|
1282 | 1281 | len(second_group.group_name.split('/')) if second_group else 0) |
|
1283 | 1282 | return cmp(second_group_parts, first_group_parts) |
|
1284 | 1283 | |
|
1285 | 1284 | sorted_repo_group_ids = sorted( |
|
1286 | 1285 | self.repo_group_ids, cmp=_repo_group_compare) |
|
1287 | 1286 | for repo_group_id in sorted_repo_group_ids: |
|
1288 | 1287 | self.fixture.destroy_repo_group(repo_group_id) |
|
1289 | 1288 | |
|
1290 | 1289 | def _cleanup_repos(self): |
|
1291 | 1290 | sorted_repos_ids = sorted(self.repos_ids) |
|
1292 | 1291 | for repo_id in sorted_repos_ids: |
|
1293 | 1292 | self.fixture.destroy_repo(repo_id) |
|
1294 | 1293 | |
|
1295 | 1294 | def _cleanup_user_groups(self): |
|
1296 | 1295 | def _user_group_compare(first_group_id, second_group_id): |
|
1297 | 1296 | """ |
|
1298 | 1297 | Gives higher priority to the groups with the most complex paths |
|
1299 | 1298 | """ |
|
1300 | 1299 | first_group = UserGroup.get(first_group_id) |
|
1301 | 1300 | second_group = UserGroup.get(second_group_id) |
|
1302 | 1301 | first_group_parts = ( |
|
1303 | 1302 | len(first_group.users_group_name.split('/')) |
|
1304 | 1303 | if first_group else 0) |
|
1305 | 1304 | second_group_parts = ( |
|
1306 | 1305 | len(second_group.users_group_name.split('/')) |
|
1307 | 1306 | if second_group else 0) |
|
1308 | 1307 | return cmp(second_group_parts, first_group_parts) |
|
1309 | 1308 | |
|
1310 | 1309 | sorted_user_group_ids = sorted( |
|
1311 | 1310 | self.user_group_ids, cmp=_user_group_compare) |
|
1312 | 1311 | for user_group_id in sorted_user_group_ids: |
|
1313 | 1312 | self.fixture.destroy_user_group(user_group_id) |
|
1314 | 1313 | |
|
1315 | 1314 | def _cleanup_users(self): |
|
1316 | 1315 | for user_id in self.user_ids: |
|
1317 | 1316 | self.fixture.destroy_user(user_id) |
|
1318 | 1317 | |
|
1319 | 1318 | |
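A short hedged sketch of `user_util`; everything created through it is recorded and destroyed by the finalizer, so the test body needs no explicit cleanup ('repository.read' is a standard permission name)::

    def test_user_can_be_granted_repo_access(user_util):
        user = user_util.create_user()
        repo = user_util.create_repo()
        permission = user_util.grant_user_permission_to_repo(
            repo, user, 'repository.read')
        assert permission is not None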
|
1320 | 1319 | # TODO: Think about moving this into a pytest-pyro package and make it a |
|
1321 | 1320 | # pytest plugin |
|
1322 | 1321 | @pytest.hookimpl(tryfirst=True, hookwrapper=True) |
|
1323 | 1322 | def pytest_runtest_makereport(item, call): |
|
1324 | 1323 | """ |
|
1325 | 1324 | Adds the remote traceback if the exception has this information. |
|
1326 | 1325 | |
|
1327 | 1326 | VCSServer attaches this information as the attribute `_vcs_server_traceback` |
|
1328 | 1327 | to the exception instance. |
|
1329 | 1328 | """ |
|
1330 | 1329 | outcome = yield |
|
1331 | 1330 | report = outcome.get_result() |
|
1332 | 1331 | if call.excinfo: |
|
1333 | 1332 | _add_vcsserver_remote_traceback(report, call.excinfo.value) |
|
1334 | 1333 | |
|
1335 | 1334 | |
|
1336 | 1335 | def _add_vcsserver_remote_traceback(report, exc): |
|
1337 | 1336 | vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None) |
|
1338 | 1337 | |
|
1339 | 1338 | if vcsserver_traceback: |
|
1340 | 1339 | section = 'VCSServer remote traceback ' + report.when |
|
1341 | 1340 | report.sections.append((section, vcsserver_traceback)) |
|
1342 | 1341 | |
|
1343 | 1342 | |
|
1344 | 1343 | @pytest.fixture(scope='session') |
|
1345 | 1344 | def testrun(): |
|
1346 | 1345 | return { |
|
1347 | 1346 | 'uuid': uuid.uuid4(), |
|
1348 | 1347 | 'start': datetime.datetime.utcnow().isoformat(), |
|
1349 | 1348 | 'timestamp': int(time.time()), |
|
1350 | 1349 | } |
|
1351 | 1350 | |
|
1352 | 1351 | |
|
1353 | 1352 | @pytest.fixture(autouse=True) |
|
1354 | 1353 | def collect_appenlight_stats(request, testrun): |
|
1355 | 1354 | """ |
|
1356 | 1355 | This fixture reports memory consumption of single tests. |
|
1357 | 1356 | |
|
1358 | 1357 | It gathers data based on `psutil` and sends them to Appenlight. The option |
|
1359 | 1358 | ``--appenlight`` has to be used to enable this fixture and the API key for your |

1360 | 1359 | application has to be provided in ``--appenlight-api-key``. |
|
1361 | 1360 | """ |
|
1362 | 1361 | try: |
|
1363 | 1362 | # cygwin does not yet have psutil support. |
|
1364 | 1363 | import psutil |
|
1365 | 1364 | except ImportError: |
|
1366 | 1365 | return |
|
1367 | 1366 | |
|
1368 | 1367 | if not request.config.getoption('--appenlight'): |
|
1369 | 1368 | return |
|
1370 | 1369 | else: |
|
1371 | 1370 | # Only request the baseapp fixture if appenlight tracking is |
|
1372 | 1371 | # enabled. This will speed up a test run of unit tests by 2 to 3 |
|
1373 | 1372 | # seconds if appenlight is not enabled. |
|
1374 | 1373 | baseapp = request.getfuncargvalue("baseapp") |
|
1375 | 1374 | url = '{}/api/logs'.format(request.config.getoption('--appenlight-url')) |
|
1376 | 1375 | client = AppenlightClient( |
|
1377 | 1376 | url=url, |
|
1378 | 1377 | api_key=request.config.getoption('--appenlight-api-key'), |
|
1379 | 1378 | namespace=request.node.nodeid, |
|
1380 | 1379 | request=str(testrun['uuid']), |
|
1381 | 1380 | testrun=testrun) |
|
1382 | 1381 | |
|
1383 | 1382 | client.collect({ |
|
1384 | 1383 | 'message': "Starting", |
|
1385 | 1384 | }) |
|
1386 | 1385 | |
|
1387 | 1386 | server_and_port = baseapp.config.get_settings()['vcs.server'] |
|
1388 | 1387 | protocol = baseapp.config.get_settings()['vcs.server.protocol'] |
|
1389 | 1388 | server = create_vcsserver_proxy(server_and_port, protocol) |
|
1390 | 1389 | with server: |
|
1391 | 1390 | vcs_pid = server.get_pid() |
|
1392 | 1391 | server.run_gc() |
|
1393 | 1392 | vcs_process = psutil.Process(vcs_pid) |
|
1394 | 1393 | mem = vcs_process.memory_info() |
|
1395 | 1394 | client.tag_before('vcsserver.rss', mem.rss) |
|
1396 | 1395 | client.tag_before('vcsserver.vms', mem.vms) |
|
1397 | 1396 | |
|
1398 | 1397 | test_process = psutil.Process() |
|
1399 | 1398 | mem = test_process.memory_info() |
|
1400 | 1399 | client.tag_before('test.rss', mem.rss) |
|
1401 | 1400 | client.tag_before('test.vms', mem.vms) |
|
1402 | 1401 | |
|
1403 | 1402 | client.tag_before('time', time.time()) |
|
1404 | 1403 | |
|
1405 | 1404 | @request.addfinalizer |
|
1406 | 1405 | def send_stats(): |
|
1407 | 1406 | client.tag_after('time', time.time()) |
|
1408 | 1407 | with server: |
|
1409 | 1408 | gc_stats = server.run_gc() |
|
1410 | 1409 | for tag, value in gc_stats.items(): |
|
1411 | 1410 | client.tag_after(tag, value) |
|
1412 | 1411 | mem = vcs_process.memory_info() |
|
1413 | 1412 | client.tag_after('vcsserver.rss', mem.rss) |
|
1414 | 1413 | client.tag_after('vcsserver.vms', mem.vms) |
|
1415 | 1414 | |
|
1416 | 1415 | mem = test_process.memory_info() |
|
1417 | 1416 | client.tag_after('test.rss', mem.rss) |
|
1418 | 1417 | client.tag_after('test.vms', mem.vms) |
|
1419 | 1418 | |
|
1420 | 1419 | client.collect({ |
|
1421 | 1420 | 'message': "Finished", |
|
1422 | 1421 | }) |
|
1423 | 1422 | client.send_stats() |
|
1424 | 1423 | |
|
1425 | 1424 | return client |
|
1426 | 1425 | |
|
1427 | 1426 | |
|
1428 | 1427 | class AppenlightClient(object): |
|
1429 | 1428 | |
|
1430 | 1429 | url_template = '{url}?protocol_version=0.5' |
|
1431 | 1430 | |
|
1432 | 1431 | def __init__( |
|
1433 | 1432 | self, url, api_key, add_server=True, add_timestamp=True, |
|
1434 | 1433 | namespace=None, request=None, testrun=None): |
|
1435 | 1434 | self.url = self.url_template.format(url=url) |
|
1436 | 1435 | self.api_key = api_key |
|
1437 | 1436 | self.add_server = add_server |
|
1438 | 1437 | self.add_timestamp = add_timestamp |
|
1439 | 1438 | self.namespace = namespace |
|
1440 | 1439 | self.request = request |
|
1441 | 1440 | self.server = socket.getfqdn(socket.gethostname()) |
|
1442 | 1441 | self.tags_before = {} |
|
1443 | 1442 | self.tags_after = {} |
|
1444 | 1443 | self.stats = [] |
|
1445 | 1444 | self.testrun = testrun or {} |
|
1446 | 1445 | |
|
1447 | 1446 | def tag_before(self, tag, value): |
|
1448 | 1447 | self.tags_before[tag] = value |
|
1449 | 1448 | |
|
1450 | 1449 | def tag_after(self, tag, value): |
|
1451 | 1450 | self.tags_after[tag] = value |
|
1452 | 1451 | |
|
1453 | 1452 | def collect(self, data): |
|
1454 | 1453 | if self.add_server: |
|
1455 | 1454 | data.setdefault('server', self.server) |
|
1456 | 1455 | if self.add_timestamp: |
|
1457 | 1456 | data.setdefault('date', datetime.datetime.utcnow().isoformat()) |
|
1458 | 1457 | if self.namespace: |
|
1459 | 1458 | data.setdefault('namespace', self.namespace) |
|
1460 | 1459 | if self.request: |
|
1461 | 1460 | data.setdefault('request', self.request) |
|
1462 | 1461 | self.stats.append(data) |
|
1463 | 1462 | |
|
1464 | 1463 | def send_stats(self): |
|
1465 | 1464 | tags = [ |
|
1466 | 1465 | ('testrun', self.request), |
|
1467 | 1466 | ('testrun.start', self.testrun['start']), |
|
1468 | 1467 | ('testrun.timestamp', self.testrun['timestamp']), |
|
1469 | 1468 | ('test', self.namespace), |
|
1470 | 1469 | ] |
|
1471 | 1470 | for key, value in self.tags_before.items(): |
|
1472 | 1471 | tags.append((key + '.before', value)) |
|
1473 | 1472 | try: |
|
1474 | 1473 | delta = self.tags_after[key] - value |
|
1475 | 1474 | tags.append((key + '.delta', delta)) |
|
1476 | 1475 | except Exception: |
|
1477 | 1476 | pass |
|
1478 | 1477 | for key, value in self.tags_after.items(): |
|
1479 | 1478 | tags.append((key + '.after', value)) |
|
1480 | 1479 | self.collect({ |
|
1481 | 1480 | 'message': "Collected tags", |
|
1482 | 1481 | 'tags': tags, |
|
1483 | 1482 | }) |
|
1484 | 1483 | |
|
1485 | 1484 | response = requests.post( |
|
1486 | 1485 | self.url, |
|
1487 | 1486 | headers={ |
|
1488 | 1487 | 'X-appenlight-api-key': self.api_key}, |
|
1489 | 1488 | json=self.stats, |
|
1490 | 1489 | ) |
|
1491 | 1490 | |
|
1492 | 1491 | if response.status_code != 200: |
|
1493 | 1492 | pprint.pprint(self.stats) |
|
1494 | 1493 | print(response.headers) |
|
1495 | 1494 | print(response.text) |
|
1496 | 1495 | raise Exception('Sending to appenlight failed') |
|
1497 | 1496 | |
|
1498 | 1497 | |
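To illustrate the tagging protocol used by `collect_appenlight_stats`: `send_stats` emits `<tag>.before` and `<tag>.after` entries and, where subtraction succeeds, a `<tag>.delta`. A hedged sketch with placeholder url and key (the final POST requires a reachable Appenlight endpoint)::

    client = AppenlightClient(
        url='http://localhost:6543/api/logs', api_key='PLACEHOLDER')
    client.tag_before('test.rss', 1000)
    client.tag_after('test.rss', 1500)
    # collects ('test.rss.delta', 500) among the tags and POSTs them
    client.send_stats()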
|
1499 | 1498 | @pytest.fixture |
|
1500 | 1499 | def gist_util(request, baseapp): |
|
1501 | 1500 | """ |
|
1502 | 1501 | Provides a wired instance of `GistUtility` with integrated cleanup. |
|
1503 | 1502 | """ |
|
1504 | 1503 | utility = GistUtility() |
|
1505 | 1504 | request.addfinalizer(utility.cleanup) |
|
1506 | 1505 | return utility |
|
1507 | 1506 | |
|
1508 | 1507 | |
|
1509 | 1508 | class GistUtility(object): |
|
1510 | 1509 | def __init__(self): |
|
1511 | 1510 | self.fixture = Fixture() |
|
1512 | 1511 | self.gist_ids = [] |
|
1513 | 1512 | |
|
1514 | 1513 | def create_gist(self, **kwargs): |
|
1515 | 1514 | gist = self.fixture.create_gist(**kwargs) |
|
1516 | 1515 | self.gist_ids.append(gist.gist_id) |
|
1517 | 1516 | return gist |
|
1518 | 1517 | |
|
1519 | 1518 | def cleanup(self): |
|
1520 | 1519 | for id_ in self.gist_ids: |
|
1521 | 1520 | self.fixture.destroy_gists(str(id_)) |
|
1522 | 1521 | |
|
1523 | 1522 | |
|
1524 | 1523 | @pytest.fixture |
|
1525 | 1524 | def enabled_backends(request): |
|
1526 | 1525 | backends = request.config.option.backends |
|
1527 | 1526 | return backends[:] |
|
1528 | 1527 | |
|
1529 | 1528 | |
|
1530 | 1529 | @pytest.fixture |
|
1531 | 1530 | def settings_util(request): |
|
1532 | 1531 | """ |
|
1533 | 1532 | Provides a wired instance of `SettingsUtility` with integrated cleanup. |
|
1534 | 1533 | """ |
|
1535 | 1534 | utility = SettingsUtility() |
|
1536 | 1535 | request.addfinalizer(utility.cleanup) |
|
1537 | 1536 | return utility |
|
1538 | 1537 | |
|
1539 | 1538 | |
|
1540 | 1539 | class SettingsUtility(object): |
|
1541 | 1540 | def __init__(self): |
|
1542 | 1541 | self.rhodecode_ui_ids = [] |
|
1543 | 1542 | self.rhodecode_setting_ids = [] |
|
1544 | 1543 | self.repo_rhodecode_ui_ids = [] |
|
1545 | 1544 | self.repo_rhodecode_setting_ids = [] |
|
1546 | 1545 | |
|
1547 | 1546 | def create_repo_rhodecode_ui( |
|
1548 | 1547 | self, repo, section, value, key=None, active=True, cleanup=True): |
|
1549 | 1548 | key = key or hashlib.sha1( |
|
1550 | 1549 | '{}{}{}'.format(section, value, repo.repo_id)).hexdigest() |
|
1551 | 1550 | |
|
1552 | 1551 | setting = RepoRhodeCodeUi() |
|
1553 | 1552 | setting.repository_id = repo.repo_id |
|
1554 | 1553 | setting.ui_section = section |
|
1555 | 1554 | setting.ui_value = value |
|
1556 | 1555 | setting.ui_key = key |
|
1557 | 1556 | setting.ui_active = active |
|
1558 | 1557 | Session().add(setting) |
|
1559 | 1558 | Session().commit() |
|
1560 | 1559 | |
|
1561 | 1560 | if cleanup: |
|
1562 | 1561 | self.repo_rhodecode_ui_ids.append(setting.ui_id) |
|
1563 | 1562 | return setting |
|
1564 | 1563 | |
|
1565 | 1564 | def create_rhodecode_ui( |
|
1566 | 1565 | self, section, value, key=None, active=True, cleanup=True): |
|
1567 | 1566 | key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest() |
|
1568 | 1567 | |
|
1569 | 1568 | setting = RhodeCodeUi() |
|
1570 | 1569 | setting.ui_section = section |
|
1571 | 1570 | setting.ui_value = value |
|
1572 | 1571 | setting.ui_key = key |
|
1573 | 1572 | setting.ui_active = active |
|
1574 | 1573 | Session().add(setting) |
|
1575 | 1574 | Session().commit() |
|
1576 | 1575 | |
|
1577 | 1576 | if cleanup: |
|
1578 | 1577 | self.rhodecode_ui_ids.append(setting.ui_id) |
|
1579 | 1578 | return setting |
|
1580 | 1579 | |
|
1581 | 1580 | def create_repo_rhodecode_setting( |
|
1582 | 1581 | self, repo, name, value, type_, cleanup=True): |
|
1583 | 1582 | setting = RepoRhodeCodeSetting( |
|
1584 | 1583 | repo.repo_id, key=name, val=value, type=type_) |
|
1585 | 1584 | Session().add(setting) |
|
1586 | 1585 | Session().commit() |
|
1587 | 1586 | |
|
1588 | 1587 | if cleanup: |
|
1589 | 1588 | self.repo_rhodecode_setting_ids.append(setting.app_settings_id) |
|
1590 | 1589 | return setting |
|
1591 | 1590 | |
|
1592 | 1591 | def create_rhodecode_setting(self, name, value, type_, cleanup=True): |
|
1593 | 1592 | setting = RhodeCodeSetting(key=name, val=value, type=type_) |
|
1594 | 1593 | Session().add(setting) |
|
1595 | 1594 | Session().commit() |
|
1596 | 1595 | |
|
1597 | 1596 | if cleanup: |
|
1598 | 1597 | self.rhodecode_setting_ids.append(setting.app_settings_id) |
|
1599 | 1598 | |
|
1600 | 1599 | return setting |
|
1601 | 1600 | |
|
1602 | 1601 | def cleanup(self): |
|
1603 | 1602 | for id_ in self.rhodecode_ui_ids: |
|
1604 | 1603 | setting = RhodeCodeUi.get(id_) |
|
1605 | 1604 | Session().delete(setting) |
|
1606 | 1605 | |
|
1607 | 1606 | for id_ in self.rhodecode_setting_ids: |
|
1608 | 1607 | setting = RhodeCodeSetting.get(id_) |
|
1609 | 1608 | Session().delete(setting) |
|
1610 | 1609 | |
|
1611 | 1610 | for id_ in self.repo_rhodecode_ui_ids: |
|
1612 | 1611 | setting = RepoRhodeCodeUi.get(id_) |
|
1613 | 1612 | Session().delete(setting) |
|
1614 | 1613 | |
|
1615 | 1614 | for id_ in self.repo_rhodecode_setting_ids: |
|
1616 | 1615 | setting = RepoRhodeCodeSetting.get(id_) |
|
1617 | 1616 | Session().delete(setting) |
|
1618 | 1617 | |
|
1619 | 1618 | Session().commit() |
|
1620 | 1619 | |
|
1621 | 1620 | |
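A hedged example of `settings_util` creating a temporary ui entry; the recorded `ui_id` is deleted again in `cleanup()` (section and value are illustrative)::

    def test_custom_hook_is_stored(settings_util):
        setting = settings_util.create_rhodecode_ui(
            'hooks', 'python:my_module.my_hook')
        assert setting.ui_key is not None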
|
1622 | 1621 | @pytest.fixture |
|
1623 | 1622 | def no_notifications(request): |
|
1624 | 1623 | notification_patcher = mock.patch( |
|
1625 | 1624 | 'rhodecode.model.notification.NotificationModel.create') |
|
1626 | 1625 | notification_patcher.start() |
|
1627 | 1626 | request.addfinalizer(notification_patcher.stop) |
|
1628 | 1627 | |
|
1629 | 1628 | |
|
1630 | 1629 | @pytest.fixture(scope='session') |
|
1631 | 1630 | def repeat(request): |
|
1632 | 1631 | """ |
|
1633 | 1632 | The number of repetitions is based on this fixture. |
|
1634 | 1633 | |
|
1635 | 1634 | Slower calls may divide it by 10 or 100. It is chosen so that the |
|
1636 | 1635 | tests are not too slow in our default test suite. |
|
1637 | 1636 | """ |
|
1638 | 1637 | return request.config.getoption('--repeat') |
|
1639 | 1638 | |
|
1640 | 1639 | |
|
1641 | 1640 | @pytest.fixture |
|
1642 | 1641 | def rhodecode_fixtures(): |
|
1643 | 1642 | return Fixture() |
|
1644 | 1643 | |
|
1645 | 1644 | |
|
1646 | 1645 | @pytest.fixture |
|
1647 | 1646 | def context_stub(): |
|
1648 | 1647 | """ |
|
1649 | 1648 | Stub context object. |
|
1650 | 1649 | """ |
|
1651 | 1650 | context = pyramid.testing.DummyResource() |
|
1652 | 1651 | return context |
|
1653 | 1652 | |
|
1654 | 1653 | |
|
1655 | 1654 | @pytest.fixture |
|
1656 | 1655 | def request_stub(): |
|
1657 | 1656 | """ |
|
1658 | 1657 | Stub request object. |
|
1659 | 1658 | """ |
|
1660 | 1659 | from rhodecode.lib.base import bootstrap_request |
|
1661 | 1660 | request = bootstrap_request(scheme='https') |
|
1662 | 1661 | return request |
|
1663 | 1662 | |
|
1664 | 1663 | |
|
1665 | 1664 | @pytest.fixture |
|
1666 | 1665 | def config_stub(request, request_stub): |
|
1667 | 1666 | """ |
|
1668 | 1667 | Set up pyramid.testing and return the Configurator. |
|
1669 | 1668 | """ |
|
1670 | 1669 | from rhodecode.lib.base import bootstrap_config |
|
1671 | 1670 | config = bootstrap_config(request=request_stub) |
|
1672 | 1671 | |
|
1673 | 1672 | @request.addfinalizer |
|
1674 | 1673 | def cleanup(): |
|
1675 | 1674 | pyramid.testing.tearDown() |
|
1676 | 1675 | |
|
1677 | 1676 | return config |
|
1678 | 1677 | |
|
1679 | 1678 | |
|
1680 | 1679 | @pytest.fixture |
|
1681 | 1680 | def StubIntegrationType(): |
|
1682 | 1681 | class _StubIntegrationType(IntegrationTypeBase): |
|
1683 | 1682 | """ Test integration type class """ |
|
1684 | 1683 | |
|
1685 | 1684 | key = 'test' |
|
1686 | 1685 | display_name = 'Test integration type' |
|
1687 | 1686 | description = 'A test integration type for testing' |
|
1688 | 1687 | icon = 'test_icon_html_image' |
|
1689 | 1688 | |
|
1690 | 1689 | def __init__(self, settings): |
|
1691 | 1690 | super(_StubIntegrationType, self).__init__(settings) |
|
1692 | 1691 | self.sent_events = [] # for testing |
|
1693 | 1692 | |
|
1694 | 1693 | def send_event(self, event): |
|
1695 | 1694 | self.sent_events.append(event) |
|
1696 | 1695 | |
|
1697 | 1696 | def settings_schema(self): |
|
1698 | 1697 | class SettingsSchema(colander.Schema): |
|
1699 | 1698 | test_string_field = colander.SchemaNode( |
|
1700 | 1699 | colander.String(), |
|
1701 | 1700 | missing=colander.required, |
|
1702 | 1701 | title='test string field', |
|
1703 | 1702 | ) |
|
1704 | 1703 | test_int_field = colander.SchemaNode( |
|
1705 | 1704 | colander.Int(), |
|
1706 | 1705 | title='some integer setting', |
|
1707 | 1706 | ) |
|
1708 | 1707 | return SettingsSchema() |
|
1709 | 1708 | |
|
1710 | 1709 | |
|
1711 | 1710 | integration_type_registry.register_integration_type(_StubIntegrationType) |
|
1712 | 1711 | return _StubIntegrationType |
|
1713 | 1712 | |
|
1714 | 1713 | @pytest.fixture |
|
1715 | 1714 | def stub_integration_settings(): |
|
1716 | 1715 | return { |
|
1717 | 1716 | 'test_string_field': 'some data', |
|
1718 | 1717 | 'test_int_field': 100, |
|
1719 | 1718 | } |
|
1720 | 1719 | |
|
1721 | 1720 | |
|
1722 | 1721 | @pytest.fixture |
|
1723 | 1722 | def repo_integration_stub(request, repo_stub, StubIntegrationType, |
|
1724 | 1723 | stub_integration_settings): |
|
1725 | 1724 | integration = IntegrationModel().create( |
|
1726 | 1725 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1727 | 1726 | name='test repo integration', |
|
1728 | 1727 | repo=repo_stub, repo_group=None, child_repos_only=None) |
|
1729 | 1728 | |
|
1730 | 1729 | @request.addfinalizer |
|
1731 | 1730 | def cleanup(): |
|
1732 | 1731 | IntegrationModel().delete(integration) |
|
1733 | 1732 | |
|
1734 | 1733 | return integration |
|
1735 | 1734 | |
|
1736 | 1735 | |
|
1737 | 1736 | @pytest.fixture |
|
1738 | 1737 | def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, |
|
1739 | 1738 | stub_integration_settings): |
|
1740 | 1739 | integration = IntegrationModel().create( |
|
1741 | 1740 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1742 | 1741 | name='test repogroup integration', |
|
1743 | 1742 | repo=None, repo_group=test_repo_group, child_repos_only=True) |
|
1744 | 1743 | |
|
1745 | 1744 | @request.addfinalizer |
|
1746 | 1745 | def cleanup(): |
|
1747 | 1746 | IntegrationModel().delete(integration) |
|
1748 | 1747 | |
|
1749 | 1748 | return integration |
|
1750 | 1749 | |
|
1751 | 1750 | |
|
1752 | 1751 | @pytest.fixture |
|
1753 | 1752 | def repogroup_recursive_integration_stub(request, test_repo_group, |
|
1754 | 1753 | StubIntegrationType, stub_integration_settings): |
|
1755 | 1754 | integration = IntegrationModel().create( |
|
1756 | 1755 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1757 | 1756 | name='test recursive repogroup integration', |
|
1758 | 1757 | repo=None, repo_group=test_repo_group, child_repos_only=False) |
|
1759 | 1758 | |
|
1760 | 1759 | @request.addfinalizer |
|
1761 | 1760 | def cleanup(): |
|
1762 | 1761 | IntegrationModel().delete(integration) |
|
1763 | 1762 | |
|
1764 | 1763 | return integration |
|
1765 | 1764 | |
|
1766 | 1765 | |
|
1767 | 1766 | @pytest.fixture |
|
1768 | 1767 | def global_integration_stub(request, StubIntegrationType, |
|
1769 | 1768 | stub_integration_settings): |
|
1770 | 1769 | integration = IntegrationModel().create( |
|
1771 | 1770 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1772 | 1771 | name='test global integration', |
|
1773 | 1772 | repo=None, repo_group=None, child_repos_only=None) |
|
1774 | 1773 | |
|
1775 | 1774 | @request.addfinalizer |
|
1776 | 1775 | def cleanup(): |
|
1777 | 1776 | IntegrationModel().delete(integration) |
|
1778 | 1777 | |
|
1779 | 1778 | return integration |
|
1780 | 1779 | |
|
1781 | 1780 | |
|
1782 | 1781 | @pytest.fixture |
|
1783 | 1782 | def root_repos_integration_stub(request, StubIntegrationType, |
|
1784 | 1783 | stub_integration_settings): |
|
1785 | 1784 | integration = IntegrationModel().create( |
|
1786 | 1785 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1787 | 1786 | name='test root repos integration', |
|
1788 | 1787 | repo=None, repo_group=None, child_repos_only=True) |
|
1789 | 1788 | |
|
1790 | 1789 | @request.addfinalizer |
|
1791 | 1790 | def cleanup(): |
|
1792 | 1791 | IntegrationModel().delete(integration) |
|
1793 | 1792 | |
|
1794 | 1793 | return integration |
|
1795 | 1794 | |
|
1796 | 1795 | |
|
1797 | 1796 | @pytest.fixture |
|
1798 | 1797 | def local_dt_to_utc(): |
|
1799 | 1798 | def _factory(dt): |
|
1800 | 1799 | return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone( |
|
1801 | 1800 | dateutil.tz.tzutc()).replace(tzinfo=None) |
|
1802 | 1801 | return _factory |
|
1803 | 1802 | |
|
1804 | 1803 | |
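For example, a naive local datetime converted to naive UTC (the expected offset assumes a UTC+2 local zone, purely for illustration)::

    def test_conversion_strips_tzinfo(local_dt_to_utc):
        local = datetime.datetime(2017, 6, 1, 12, 0, 0)
        utc = local_dt_to_utc(local)
        # under UTC+2 this would be 10:00; the result is always naive
        assert utc.tzinfo is None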
|
1805 | 1804 | @pytest.fixture |
|
1806 | 1805 | def disable_anonymous_user(request, baseapp): |
|
1807 | 1806 | set_anonymous_access(False) |
|
1808 | 1807 | |
|
1809 | 1808 | @request.addfinalizer |
|
1810 | 1809 | def cleanup(): |
|
1811 | 1810 | set_anonymous_access(True) |
|
1812 | 1811 | |
|
1813 | 1812 | |
|
1814 | 1813 | @pytest.fixture |
|
1815 | 1814 | def rc_fixture(request): |
|
1816 | 1815 | return Fixture() |
|
1817 | 1816 | |
|
1818 | 1817 | |
|
1819 | 1818 | @pytest.fixture |
|
1820 | 1819 | def repo_groups(request): |
|
1821 | 1820 | fixture = Fixture() |
|
1822 | 1821 | |
|
1823 | 1822 | session = Session() |
|
1824 | 1823 | zombie_group = fixture.create_repo_group('zombie') |
|
1825 | 1824 | parent_group = fixture.create_repo_group('parent') |
|
1826 | 1825 | child_group = fixture.create_repo_group('parent/child') |
|
1827 | 1826 | groups_in_db = session.query(RepoGroup).all() |
|
1828 | 1827 | assert len(groups_in_db) == 3 |
|
1829 | 1828 | assert child_group.group_parent_id == parent_group.group_id |
|
1830 | 1829 | |
|
1831 | 1830 | @request.addfinalizer |
|
1832 | 1831 | def cleanup(): |
|
1833 | 1832 | fixture.destroy_repo_group(zombie_group) |
|
1834 | 1833 | fixture.destroy_repo_group(child_group) |
|
1835 | 1834 | fixture.destroy_repo_group(parent_group) |
|
1836 | 1835 | |
|
1837 | 1836 | return zombie_group, parent_group, child_group |
|