celery: celery 4.X support. Fixes #4169...
marcink -
r2359:246f5a4c default
@@ -0,0 +1,256 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 """
21 Celery loader, run with::
22
23 celery worker --beat --app rhodecode.lib.celerylib.loader --loglevel DEBUG --ini=._dev/dev.ini
24 """
25 import os
26 import logging
27
28 from celery import Celery
29 from celery import signals
30 from celery import Task
31 from kombu.serialization import register
32 from pyramid.threadlocal import get_current_request
33
34 import rhodecode
35
36 from rhodecode.lib.auth import AuthUser
37 from rhodecode.lib.celerylib.utils import get_ini_config, parse_ini_vars
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.pyramid_utils import bootstrap, setup_logging, prepare_request
40 from rhodecode.lib.utils2 import str2bool
41 from rhodecode.model import meta
42
43
44 register('json_ext', json.dumps, json.loads,
45 content_type='application/x-json-ext',
46 content_encoding='utf-8')
47
48 log = logging.getLogger('celery.rhodecode.loader')
49
50
51 def add_preload_arguments(parser):
52 parser.add_argument(
53 '--ini', default=None,
54 help='Path to ini configuration file.'
55 )
56 parser.add_argument(
57 '--ini-var', default=None,
58 help='Comma separated list of key=value to pass to ini.'
59 )
60
61
62 def get_logger(obj):
63 custom_log = logging.getLogger(
64 'rhodecode.task.{}'.format(obj.__class__.__name__))
65
66 if rhodecode.CELERY_ENABLED:
67 try:
68 custom_log = obj.get_logger()
69 except Exception:
70 pass
71
72 return custom_log
73
74
75 base_celery_config = {
76 'result_backend': 'rpc://',
77 'result_expires': 60 * 60 * 24,
78 'result_persistent': True,
79 'imports': [],
80 'worker_max_tasks_per_child': 100,
81 'accept_content': ['json_ext'],
82 'task_serializer': 'json_ext',
83 'result_serializer': 'json_ext',
84 'worker_hijack_root_logger': False,
85 }
86 # init main celery app
87 celery_app = Celery()
88 celery_app.user_options['preload'].add(add_preload_arguments)
89 ini_file_glob = None
90
91
92 @signals.setup_logging.connect
93 def setup_logging_callback(**kwargs):
94 setup_logging(ini_file_glob)
95
96
97 @signals.user_preload_options.connect
98 def on_preload_parsed(options, **kwargs):
99 ini_location = options['ini']
100 ini_vars = options['ini_var']
101 celery_app.conf['INI_PYRAMID'] = options['ini']
102
103 if ini_location is None:
104 print('You must provide the paste --ini argument')
105 exit(-1)
106
107 options = None
108 if ini_vars is not None:
109 options = parse_ini_vars(ini_vars)
110
111 global ini_file_glob
112 ini_file_glob = ini_location
113
114 log.debug('Bootstrapping RhodeCode application...')
115 env = bootstrap(ini_location, options=options)
116
117 setup_celery_app(
118 app=env['app'], root=env['root'], request=env['request'],
119 registry=env['registry'], closer=env['closer'],
120 ini_location=ini_location)
121
122 # force the global flag on, even if it is disabled via the .ini file,
123 # because this is worker code that must have celery enabled.
124 rhodecode.CELERY_ENABLED = True
125
126
127 @signals.task_success.connect
128 def task_success_signal(result, **kwargs):
129 meta.Session.commit()
130 celery_app.conf['PYRAMID_CLOSER']()
131
132
133 @signals.task_retry.connect
134 def task_retry_signal(
135 request, reason, einfo, **kwargs):
136 meta.Session.remove()
137 celery_app.conf['PYRAMID_CLOSER']()
138
139
140 @signals.task_failure.connect
141 def task_failure_signal(
142 task_id, exception, args, kwargs, traceback, einfo, **kargs):
143 meta.Session.remove()
144 celery_app.conf['PYRAMID_CLOSER']()
145
146
147 @signals.task_revoked.connect
148 def task_revoked_signal(
149 request, terminated, signum, expired, **kwargs):
150 celery_app.conf['PYRAMID_CLOSER']()
151
152
153 def setup_celery_app(app, root, request, registry, closer, ini_location):
154 ini_dir = os.path.dirname(os.path.abspath(ini_location))
155 celery_config = base_celery_config
156 celery_config.update({
157 # store celerybeat scheduler db where the .ini file is
158 'beat_schedule_filename': os.path.join(ini_dir, 'celerybeat-schedule'),
159 })
160 ini_settings = get_ini_config(ini_location)
161 log.debug('Got custom celery conf: %s', ini_settings)
162
163 celery_config.update(ini_settings)
164 celery_app.config_from_object(celery_config)
165
166 celery_app.conf.update({'PYRAMID_APP': app})
167 celery_app.conf.update({'PYRAMID_ROOT': root})
168 celery_app.conf.update({'PYRAMID_REQUEST': request})
169 celery_app.conf.update({'PYRAMID_REGISTRY': registry})
170 celery_app.conf.update({'PYRAMID_CLOSER': closer})
171
172
173 def configure_celery(config, ini_location):
174 """
175 Helper called from our application creation logic. It wires the running
176 webapp's connection info into the celery app and allows tasks to be
177 executed from RhodeCode itself
178 """
179 # store some globals into rhodecode
180 rhodecode.CELERY_ENABLED = str2bool(
181 config.registry.settings.get('use_celery'))
182 if rhodecode.CELERY_ENABLED:
183 log.info('Configuring celery based on `%s` file', ini_location)
184 setup_celery_app(
185 app=None, root=None, request=None, registry=config.registry,
186 closer=None, ini_location=ini_location)
187
188
189 class RequestContextTask(Task):
190 """
191 A celery task base class that creates a rhodecode app instance context
192 for the task, patches pyramid's threadlocals with the original request
193 that queued the task, and also adds the user to the context.
194 """
195
196 def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
197 link=None, link_error=None, shadow=None, **options):
198 """ queue the job to run (we are in web request context here) """
199
200 req = get_current_request()
201
202 # web case
203 if hasattr(req, 'user'):
204 ip_addr = req.user.ip_addr
205 user_id = req.user.user_id
206
207 # api case
208 elif hasattr(req, 'rpc_user'):
209 ip_addr = req.rpc_user.ip_addr
210 user_id = req.rpc_user.user_id
211 else:
212 raise Exception(
213 'Unable to fetch required data from request: {}. \n'
214 'This task must be executed from the context of '
215 'a web request'.format(repr(req)))
216
217 if req:
218 # we hook into the message headers since it is the only way to pass
219 # our data to the celery worker
220 options['headers'] = options.get('headers', {})
221 options['headers'].update({
222 'rhodecode_proxy_data': {
223 'environ': {
224 'PATH_INFO': req.environ['PATH_INFO'],
225 'SCRIPT_NAME': req.environ['SCRIPT_NAME'],
226 'HTTP_HOST': req.environ.get('HTTP_HOST',
227 req.environ['SERVER_NAME']),
228 'SERVER_NAME': req.environ['SERVER_NAME'],
229 'SERVER_PORT': req.environ['SERVER_PORT'],
230 'wsgi.url_scheme': req.environ['wsgi.url_scheme'],
231 },
232 'auth_user': {
233 'ip_addr': ip_addr,
234 'user_id': user_id
235 },
236 }
237 })
238
239 return super(RequestContextTask, self).apply_async(
240 args, kwargs, task_id, producer, link, link_error, shadow, **options)
241
242 def __call__(self, *args, **kwargs):
243 """ rebuild the context and then run task on celery worker """
244
245 proxy_data = getattr(self.request, 'rhodecode_proxy_data', None)
246 if not proxy_data:
247 return super(RequestContextTask, self).__call__(*args, **kwargs)
248
249 log.debug('using celery proxy data to run task: %r', proxy_data)
250 # re-inject and register threadlocals for proper routing support
251 request = prepare_request(proxy_data['environ'])
252 request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'],
253 ip_addr=proxy_data['auth_user']['ip_addr'])
254
255 return super(RequestContextTask, self).__call__(*args, **kwargs)
256
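For illustration, here is a minimal sketch of a task module wired to the loader above. It assumes the module path rhodecode.lib.celerylib.tasks (referenced by the old celery.imports setting) and a hypothetical send_email task that is not part of this changeset:

# rhodecode/lib/celerylib/tasks.py -- illustrative sketch, not part of this diff
from rhodecode.lib.celerylib.loader import celery_app, RequestContextTask, get_logger


@celery_app.task(bind=True, base=RequestContextTask)
def send_email(self, recipients, subject, body):
    # hypothetical task body: just log and report how many mails would go out
    log = get_logger(self)
    log.debug('sending email to %s', recipients)
    return len(recipients)

Queued from a web view with send_email.apply_async(args=(recipients, subject, body)), the call goes through RequestContextTask.apply_async, which copies the originating WSGI environ plus the user id and IP address into the message headers; on the worker, __call__ reads that rhodecode_proxy_data back, rebuilds a pyramid request via prepare_request() and attaches an AuthUser before the task body runs.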
@@ -0,0 +1,156 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import os
22 import json
23 import logging
24 import datetime
25
26 from functools import partial
27
28 from pyramid.compat import configparser
29 from celery.result import AsyncResult
30 import celery.loaders.base
31 import celery.schedules
32
33
34 log = logging.getLogger(__name__)
35
36
37 def get_task_id(task):
38 task_id = None
39 if isinstance(task, AsyncResult):
40 task_id = task.task_id
41
42 return task_id
43
44
45 def crontab(value):
46 return celery.schedules.crontab(**value)
47
48
49 def timedelta(value):
50 return datetime.timedelta(**value)
51
52
53 def safe_json(get, section, key):
54 value = ''
55 try:
56 value = get(key)
57 json_value = json.loads(value)
58 except ValueError:
59 msg = 'The %s=%s is not valid json in section %s' % (
60 key, value, section
61 )
62 raise ValueError(msg)
63
64 return json_value
65
66
67 def get_beat_config(parser, section):
68 SCHEDULE_TYPE_MAP = {
69 'crontab': crontab,
70 'timedelta': timedelta,
71 'integer': int
72 }
73 get = partial(parser.get, section)
74 has_option = partial(parser.has_option, section)
75
76 schedule_type = get('type')
77 schedule_value = safe_json(get, section, 'schedule')
78
79 scheduler_cls = SCHEDULE_TYPE_MAP.get(schedule_type)
80
81 if scheduler_cls is None:
82 raise ValueError(
83 'schedule type %s in section %s is invalid' % (
84 schedule_type,
85 section
86 )
87 )
88
89 schedule = scheduler_cls(schedule_value)
90
91 config = {
92 'task': get('task'),
93 'schedule': schedule,
94 }
95
96 if has_option('args'):
97 config['args'] = safe_json(get, section, 'args')
98
99 if has_option('kwargs'):
100 config['kwargs'] = safe_json(get, section, 'kwargs')
101
102 return config
103
104
105 def get_ini_config(ini_location):
106 """
107 Converts basic ini configuration into celery 4.X options
108 """
109 def key_converter(key_name):
110 pref = 'celery.'
111 if key_name.startswith(pref):
112 return key_name[len(pref):].replace('.', '_').lower()
113
114 def type_converter(parsed_key, value):
115 # cast to int
116 if value.isdigit():
117 return int(value)
118
119 # cast to bool
120 if value.lower() in ['true', 'false']:
121 return value.lower() == 'true'
122 return value
123
124 parser = configparser.SafeConfigParser(
125 defaults={'here': os.path.abspath(ini_location)})
126 parser.read(ini_location)
127
128 ini_config = {}
129 for k, v in parser.items('app:main'):
130 pref = 'celery.'
131 if k.startswith(pref):
132 ini_config[key_converter(k)] = type_converter(key_converter(k), v)
133
134 beat_config = {}
135 for section in parser.sections():
136 if section.startswith('celerybeat:'):
137 name = section.split(':', 1)[1]
138 beat_config[name] = get_beat_config(parser, section)
139
140 # final composition of settings
141 celery_settings = {}
142
143 if ini_config:
144 celery_settings.update(ini_config)
145 if beat_config:
146 celery_settings.update({'beat_schedule': beat_config})
147
148 return celery_settings
149
150
151 def parse_ini_vars(ini_vars):
152 options = {}
153 for pairs in ini_vars.split(','):
154 key, value = pairs.split('=')
155 options[key] = value
156 return options
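As a rough sketch of what these helpers produce, assume a hypothetical minimal ini file; the celery.* keys mirror the ones added to the development ini below, while the [celerybeat:repo_maintenance] section name and task path are invented for illustration:

import tempfile

from rhodecode.lib.celerylib.utils import get_ini_config, parse_ini_vars

ini_text = """
[app:main]
use_celery = true
celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
celery.max_tasks_per_child = 100
celery.task_always_eager = false

[celerybeat:repo_maintenance]
type = timedelta
schedule = {"hours": 1}
task = rhodecode.lib.celerylib.tasks.repo_maintenance
"""

with tempfile.NamedTemporaryFile(mode='w', suffix='.ini', delete=False) as tmp:
    tmp.write(ini_text)

settings = get_ini_config(tmp.name)
# roughly:
# {'broker_url': 'amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost',
#  'max_tasks_per_child': 100,
#  'task_always_eager': False,
#  'beat_schedule': {'repo_maintenance': {
#      'task': 'rhodecode.lib.celerylib.tasks.repo_maintenance',
#      'schedule': datetime.timedelta(hours=1)}}}

# parse_ini_vars turns the worker's --ini-var option into a plain dict:
parse_ini_vars('vcs.start_server=false,use_celery=true')
# -> {'vcs.start_server': 'false', 'use_celery': 'true'}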
@@ -1,725 +1,717 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, send an error email from
28 ## in case of Application errors, send an error email from
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be sent in case of a server crash
31 ## additional error message to be sent in case of a server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 use = egg:waitress#main
54 use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 threads = 5
56 threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 max_request_body_size = 107374182400
58 max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptors limits problems.
59 ## Use poll instead of select, fixes file descriptors limits problems.
60 ## May not work on old windows systems.
60 ## May not work on old windows systems.
61 asyncore_use_poll = true
61 asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 #use = egg:gunicorn#main
69 #use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 #workers = 2
74 #workers = 2
75 ## number of threads for each of the worker, must be set to 1 for gevent
75 ## number of threads for each of the worker, must be set to 1 for gevent
76 ## generally recommended to be at 1
76 ## generally recommended to be at 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 #proc_name = rhodecode
79 #proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## recommended for bigger setups is to use a worker class other than sync
81 ## recommended for bigger setups is to use a worker class other than sync
82 #worker_class = sync
82 #worker_class = sync
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that worker will handle before being gracefully
85 ## max number of requests that worker will handle before being gracefully
86 ## restarted, could prevent memory leaks
86 ## restarted, could prevent memory leaks
87 #max_requests = 1000
87 #max_requests = 1000
88 #max_requests_jitter = 30
88 #max_requests_jitter = 30
89 ## amount of time a worker can spend with handling a request before it
89 ## amount of time a worker can spend with handling a request before it
90 ## gets killed and restarted. Set to 6hrs
90 ## gets killed and restarted. Set to 6hrs
91 #timeout = 21600
91 #timeout = 21600
92
92
93
93
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
95 ## recommended when using proxy setup.
96 ## allows to set RhodeCode under a prefix in server.
96 ## allows to set RhodeCode under a prefix in server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on prefix url
100 ## to make your cookies only work on prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 # During development we want to have the debug toolbar enabled
111 # During development we want to have the debug toolbar enabled
112 pyramid.includes =
112 pyramid.includes =
113 pyramid_debugtoolbar
113 pyramid_debugtoolbar
114 rhodecode.lib.middleware.request_wrapper
114 rhodecode.lib.middleware.request_wrapper
115
115
116 pyramid.reload_templates = true
116 pyramid.reload_templates = true
117
117
118 debugtoolbar.hosts = 0.0.0.0/0
118 debugtoolbar.hosts = 0.0.0.0/0
119 debugtoolbar.exclude_prefixes =
119 debugtoolbar.exclude_prefixes =
120 /css
120 /css
121 /fonts
121 /fonts
122 /images
122 /images
123 /js
123 /js
124
124
125 ## RHODECODE PLUGINS ##
125 ## RHODECODE PLUGINS ##
126 rhodecode.includes =
126 rhodecode.includes =
127 rhodecode.api
127 rhodecode.api
128
128
129
129
130 # api prefix url
130 # api prefix url
131 rhodecode.api.url = /_admin/api
131 rhodecode.api.url = /_admin/api
132
132
133
133
134 ## END RHODECODE PLUGINS ##
134 ## END RHODECODE PLUGINS ##
135
135
136 ## encryption key used to encrypt social plugin tokens,
136 ## encryption key used to encrypt social plugin tokens,
137 ## remote_urls with credentials etc, if not set it defaults to
137 ## remote_urls with credentials etc, if not set it defaults to
138 ## `beaker.session.secret`
138 ## `beaker.session.secret`
139 #rhodecode.encrypted_values.secret =
139 #rhodecode.encrypted_values.secret =
140
140
141 ## decryption strict mode (enabled by default). It controls if decryption raises
141 ## decryption strict mode (enabled by default). It controls if decryption raises
142 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
142 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
143 #rhodecode.encrypted_values.strict = false
143 #rhodecode.encrypted_values.strict = false
144
144
145 ## return gzipped responses from Rhodecode (static files/application)
145 ## return gzipped responses from Rhodecode (static files/application)
146 gzip_responses = false
146 gzip_responses = false
147
147
148 ## autogenerate javascript routes file on startup
148 ## autogenerate javascript routes file on startup
149 generate_js_files = false
149 generate_js_files = false
150
150
151 ## Optional Languages
151 ## Optional Languages
152 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
152 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
153 lang = en
153 lang = en
154
154
155 ## perform a full repository scan on each server start, this should be
155 ## perform a full repository scan on each server start, this should be
156 ## set to false after first startup, to allow faster server restarts.
156 ## set to false after first startup, to allow faster server restarts.
157 startup.import_repos = false
157 startup.import_repos = false
158
158
159 ## Uncomment and set this path to use archive download cache.
159 ## Uncomment and set this path to use archive download cache.
160 ## Once enabled, generated archives will be cached at this location
160 ## Once enabled, generated archives will be cached at this location
161 ## and served from the cache during subsequent requests for the same archive of
161 ## and served from the cache during subsequent requests for the same archive of
162 ## the repository.
162 ## the repository.
163 #archive_cache_dir = /tmp/tarballcache
163 #archive_cache_dir = /tmp/tarballcache
164
164
165 ## URL at which the application is running. This is used for bootstrapping
165 ## URL at which the application is running. This is used for bootstrapping
166 ## requests in context when no web request is available. Used in ishell, or
166 ## requests in context when no web request is available. Used in ishell, or
167 ## SSH calls. Set this for events to receive proper url for SSH calls.
167 ## SSH calls. Set this for events to receive proper url for SSH calls.
168 app.base_url = http://rhodecode.local
168 app.base_url = http://rhodecode.local
169
169
170 ## change this to unique ID for security
170 ## change this to unique ID for security
171 app_instance_uuid = rc-production
171 app_instance_uuid = rc-production
172
172
173 ## cut off limit for large diffs (size in bytes). If overall diff size on
173 ## cut off limit for large diffs (size in bytes). If overall diff size on
174 ## commit, or pull request exceeds this limit this diff will be displayed
174 ## commit, or pull request exceeds this limit this diff will be displayed
175 ## partially. E.g 512000 == 512Kb
175 ## partially. E.g 512000 == 512Kb
176 cut_off_limit_diff = 512000
176 cut_off_limit_diff = 512000
177
177
178 ## cut off limit for large files inside diffs (size in bytes). Each individual
178 ## cut off limit for large files inside diffs (size in bytes). Each individual
179 ## file inside diff which exceeds this limit will be displayed partially.
179 ## file inside diff which exceeds this limit will be displayed partially.
180 ## E.g 128000 == 128Kb
180 ## E.g 128000 == 128Kb
181 cut_off_limit_file = 128000
181 cut_off_limit_file = 128000
182
182
183 ## use cache version of scm repo everywhere
183 ## use cache version of scm repo everywhere
184 vcs_full_cache = true
184 vcs_full_cache = true
185
185
186 ## force https in RhodeCode, fixes https redirects, assumes it's always https
186 ## force https in RhodeCode, fixes https redirects, assumes it's always https
187 ## Normally this is controlled by proper http flags sent from http server
187 ## Normally this is controlled by proper http flags sent from http server
188 force_https = false
188 force_https = false
189
189
190 ## use Strict-Transport-Security headers
190 ## use Strict-Transport-Security headers
191 use_htsts = false
191 use_htsts = false
192
192
193 ## number of commits stats will parse on each iteration
193 ## number of commits stats will parse on each iteration
194 commit_parse_limit = 25
194 commit_parse_limit = 25
195
195
196 ## git rev filter option, --all is the default filter, if you need to
196 ## git rev filter option, --all is the default filter, if you need to
197 ## hide all refs in changelog switch this to --branches --tags
197 ## hide all refs in changelog switch this to --branches --tags
198 git_rev_filter = --branches --tags
198 git_rev_filter = --branches --tags
199
199
200 # Set to true if your repos are exposed using the dumb protocol
200 # Set to true if your repos are exposed using the dumb protocol
201 git_update_server_info = false
201 git_update_server_info = false
202
202
203 ## RSS/ATOM feed options
203 ## RSS/ATOM feed options
204 rss_cut_off_limit = 256000
204 rss_cut_off_limit = 256000
205 rss_items_per_page = 10
205 rss_items_per_page = 10
206 rss_include_diff = false
206 rss_include_diff = false
207
207
208 ## gist URL alias, used to create nicer urls for gist. This should be an
208 ## gist URL alias, used to create nicer urls for gist. This should be an
209 ## url that does rewrites to _admin/gists/{gistid}.
209 ## url that does rewrites to _admin/gists/{gistid}.
210 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
210 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
211 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
211 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
212 gist_alias_url =
212 gist_alias_url =
213
213
214 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
214 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
215 ## used for access.
215 ## used for access.
216 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
216 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
217 ## came from the logged-in user who owns this authentication token.
217 ## came from the logged-in user who owns this authentication token.
218 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
218 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
219 ## authentication token. Such a view would only be accessible when used together
219 ## authentication token. Such a view would only be accessible when used together
220 ## with this authentication token
220 ## with this authentication token
221 ##
221 ##
222 ## list of all views can be found under `/_admin/permissions/auth_token_access`
222 ## list of all views can be found under `/_admin/permissions/auth_token_access`
223 ## The list should be "," separated and on a single line.
223 ## The list should be "," separated and on a single line.
224 ##
224 ##
225 ## Most common views to enable:
225 ## Most common views to enable:
226 # RepoCommitsView:repo_commit_download
226 # RepoCommitsView:repo_commit_download
227 # RepoCommitsView:repo_commit_patch
227 # RepoCommitsView:repo_commit_patch
228 # RepoCommitsView:repo_commit_raw
228 # RepoCommitsView:repo_commit_raw
229 # RepoCommitsView:repo_commit_raw@TOKEN
229 # RepoCommitsView:repo_commit_raw@TOKEN
230 # RepoFilesView:repo_files_diff
230 # RepoFilesView:repo_files_diff
231 # RepoFilesView:repo_archivefile
231 # RepoFilesView:repo_archivefile
232 # RepoFilesView:repo_file_raw
232 # RepoFilesView:repo_file_raw
233 # GistView:*
233 # GistView:*
234 api_access_controllers_whitelist =
234 api_access_controllers_whitelist =
235
235
236 ## default encoding used to convert from and to unicode
236 ## default encoding used to convert from and to unicode
237 ## can be also a comma separated list of encoding in case of mixed encodings
237 ## can be also a comma separated list of encoding in case of mixed encodings
238 default_encoding = UTF-8
238 default_encoding = UTF-8
239
239
240 ## instance-id prefix
240 ## instance-id prefix
241 ## a prefix key for this instance used for cache invalidation when running
241 ## a prefix key for this instance used for cache invalidation when running
242 ## multiple instances of rhodecode, make sure it's globally unique for
242 ## multiple instances of rhodecode, make sure it's globally unique for
243 ## all running rhodecode instances. Leave empty if you don't use it
243 ## all running rhodecode instances. Leave empty if you don't use it
244 instance_id =
244 instance_id =
245
245
246 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
246 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
247 ## of an authentication plugin even if it is disabled by its settings.
247 ## of an authentication plugin even if it is disabled by its settings.
248 ## This could be useful if you are unable to log in to the system due to broken
248 ## This could be useful if you are unable to log in to the system due to broken
249 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
249 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
250 ## module to log in again and fix the settings.
250 ## module to log in again and fix the settings.
251 ##
251 ##
252 ## Available builtin plugin IDs (hash is part of the ID):
252 ## Available builtin plugin IDs (hash is part of the ID):
253 ## egg:rhodecode-enterprise-ce#rhodecode
253 ## egg:rhodecode-enterprise-ce#rhodecode
254 ## egg:rhodecode-enterprise-ce#pam
254 ## egg:rhodecode-enterprise-ce#pam
255 ## egg:rhodecode-enterprise-ce#ldap
255 ## egg:rhodecode-enterprise-ce#ldap
256 ## egg:rhodecode-enterprise-ce#jasig_cas
256 ## egg:rhodecode-enterprise-ce#jasig_cas
257 ## egg:rhodecode-enterprise-ce#headers
257 ## egg:rhodecode-enterprise-ce#headers
258 ## egg:rhodecode-enterprise-ce#crowd
258 ## egg:rhodecode-enterprise-ce#crowd
259 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
259 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
260
260
261 ## alternative return HTTP header for failed authentication. Default HTTP
261 ## alternative return HTTP header for failed authentication. Default HTTP
262 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
262 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
263 ## handling that causing a series of failed authentication calls.
263 ## handling that causing a series of failed authentication calls.
264 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
264 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
265 ## This will be served instead of the default 401 on bad authentication
265 ## This will be served instead of the default 401 on bad authentication
266 auth_ret_code =
266 auth_ret_code =
267
267
268 ## use special detection method when serving auth_ret_code, instead of serving
268 ## use special detection method when serving auth_ret_code, instead of serving
269 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
269 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
270 ## and then serve auth_ret_code to clients
270 ## and then serve auth_ret_code to clients
271 auth_ret_code_detection = false
271 auth_ret_code_detection = false
272
272
273 ## locking return code. When repository is locked return this HTTP code. 2XX
273 ## locking return code. When repository is locked return this HTTP code. 2XX
274 ## codes don't break the transactions while 4XX codes do
274 ## codes don't break the transactions while 4XX codes do
275 lock_ret_code = 423
275 lock_ret_code = 423
276
276
277 ## allows to change the repository location in settings page
277 ## allows to change the repository location in settings page
278 allow_repo_location_change = true
278 allow_repo_location_change = true
279
279
280 ## allows to setup custom hooks in settings page
280 ## allows to setup custom hooks in settings page
281 allow_custom_hooks_settings = true
281 allow_custom_hooks_settings = true
282
282
283 ## generated license token, goto license page in RhodeCode settings to obtain
283 ## generated license token, goto license page in RhodeCode settings to obtain
284 ## new token
284 ## new token
285 license_token =
285 license_token =
286
286
287 ## supervisor connection uri, for managing supervisor and logs.
287 ## supervisor connection uri, for managing supervisor and logs.
288 supervisor.uri =
288 supervisor.uri =
289 ## supervisord group name/id we only want this RC instance to handle
289 ## supervisord group name/id we only want this RC instance to handle
290 supervisor.group_id = dev
290 supervisor.group_id = dev
291
291
292 ## Display extended labs settings
292 ## Display extended labs settings
293 labs_settings_active = true
293 labs_settings_active = true
294
294
295 ####################################
295 ####################################
296 ### CELERY CONFIG ####
296 ### CELERY CONFIG ####
297 ####################################
297 ####################################
298 use_celery = false
298 use_celery = false
-299 broker.host = localhost
-300 broker.vhost = rabbitmqhost
-301 broker.port = 5672
-302 broker.user = rabbitmq
-303 broker.password = qweqwe
-304
-305 celery.imports = rhodecode.lib.celerylib.tasks
-306
-307 celery.result.backend = amqp
-308 celery.result.dburi = amqp://
-309 celery.result.serialier = json
-310
-311 #celery.send.task.error.emails = true
-312 #celery.amqp.task.result.expires = 18000
-313
-314 celeryd.concurrency = 2
-315 #celeryd.log.file = celeryd.log
-316 celeryd.log.level = debug
-317 celeryd.max.tasks.per.child = 1
-318
+299
+300 # connection url to the message broker (default rabbitmq)
+301 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
+302
+303 # maximum tasks to execute before worker restart
+304 celery.max_tasks_per_child = 100
+305
319 ## tasks will never be sent to the queue, but executed locally instead.
306 ## tasks will never be sent to the queue, but executed locally instead.
-320 celery.always.eager = false
+307 celery.task_always_eager = false
321
308
322 ####################################
309 ####################################
323 ### BEAKER CACHE ####
310 ### BEAKER CACHE ####
324 ####################################
311 ####################################
325 # default cache dir for templates. Putting this into a ramdisk
312 # default cache dir for templates. Putting this into a ramdisk
326 ## can boost performance, eg. %(here)s/data_ramdisk
313 ## can boost performance, eg. %(here)s/data_ramdisk
327 cache_dir = %(here)s/data
314 cache_dir = %(here)s/data
328
315
329 ## locking and default file storage for Beaker. Putting this into a ramdisk
316 ## locking and default file storage for Beaker. Putting this into a ramdisk
330 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
317 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
331 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
318 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
332 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
319 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
333
320
334 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
321 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
335
322
336 beaker.cache.super_short_term.type = memory
323 beaker.cache.super_short_term.type = memory
337 beaker.cache.super_short_term.expire = 10
324 beaker.cache.super_short_term.expire = 10
338 beaker.cache.super_short_term.key_length = 256
325 beaker.cache.super_short_term.key_length = 256
339
326
340 beaker.cache.short_term.type = memory
327 beaker.cache.short_term.type = memory
341 beaker.cache.short_term.expire = 60
328 beaker.cache.short_term.expire = 60
342 beaker.cache.short_term.key_length = 256
329 beaker.cache.short_term.key_length = 256
343
330
344 beaker.cache.long_term.type = memory
331 beaker.cache.long_term.type = memory
345 beaker.cache.long_term.expire = 36000
332 beaker.cache.long_term.expire = 36000
346 beaker.cache.long_term.key_length = 256
333 beaker.cache.long_term.key_length = 256
347
334
348 beaker.cache.sql_cache_short.type = memory
335 beaker.cache.sql_cache_short.type = memory
349 beaker.cache.sql_cache_short.expire = 10
336 beaker.cache.sql_cache_short.expire = 10
350 beaker.cache.sql_cache_short.key_length = 256
337 beaker.cache.sql_cache_short.key_length = 256
351
338
352 ## default is memory cache, configure only if required
339 ## default is memory cache, configure only if required
353 ## using multi-node or multi-worker setup
340 ## using multi-node or multi-worker setup
354 #beaker.cache.auth_plugins.type = ext:database
341 #beaker.cache.auth_plugins.type = ext:database
355 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
342 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
356 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
343 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
357 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
344 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
358 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
345 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
359 #beaker.cache.auth_plugins.sa.pool_size = 10
346 #beaker.cache.auth_plugins.sa.pool_size = 10
360 #beaker.cache.auth_plugins.sa.max_overflow = 0
347 #beaker.cache.auth_plugins.sa.max_overflow = 0
361
348
362 beaker.cache.repo_cache_long.type = memorylru_base
349 beaker.cache.repo_cache_long.type = memorylru_base
363 beaker.cache.repo_cache_long.max_items = 4096
350 beaker.cache.repo_cache_long.max_items = 4096
364 beaker.cache.repo_cache_long.expire = 2592000
351 beaker.cache.repo_cache_long.expire = 2592000
365
352
366 ## default is memorylru_base cache, configure only if required
353 ## default is memorylru_base cache, configure only if required
367 ## using multi-node or multi-worker setup
354 ## using multi-node or multi-worker setup
368 #beaker.cache.repo_cache_long.type = ext:memcached
355 #beaker.cache.repo_cache_long.type = ext:memcached
369 #beaker.cache.repo_cache_long.url = localhost:11211
356 #beaker.cache.repo_cache_long.url = localhost:11211
370 #beaker.cache.repo_cache_long.expire = 1209600
357 #beaker.cache.repo_cache_long.expire = 1209600
371 #beaker.cache.repo_cache_long.key_length = 256
358 #beaker.cache.repo_cache_long.key_length = 256
372
359
373 ####################################
360 ####################################
374 ### BEAKER SESSION ####
361 ### BEAKER SESSION ####
375 ####################################
362 ####################################
376
363
377 ## .session.type is type of storage options for the session, current allowed
364 ## .session.type is type of storage options for the session, current allowed
378 ## types are file, ext:memcached, ext:database, and memory (default).
365 ## types are file, ext:memcached, ext:database, and memory (default).
379 beaker.session.type = file
366 beaker.session.type = file
380 beaker.session.data_dir = %(here)s/data/sessions/data
367 beaker.session.data_dir = %(here)s/data/sessions/data
381
368
382 ## db based session, fast, and allows easy management over logged in users
369 ## db based session, fast, and allows easy management over logged in users
383 #beaker.session.type = ext:database
370 #beaker.session.type = ext:database
384 #beaker.session.table_name = db_session
371 #beaker.session.table_name = db_session
385 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
372 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
386 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
373 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
387 #beaker.session.sa.pool_recycle = 3600
374 #beaker.session.sa.pool_recycle = 3600
388 #beaker.session.sa.echo = false
375 #beaker.session.sa.echo = false
389
376
390 beaker.session.key = rhodecode
377 beaker.session.key = rhodecode
391 beaker.session.secret = develop-rc-uytcxaz
378 beaker.session.secret = develop-rc-uytcxaz
392 beaker.session.lock_dir = %(here)s/data/sessions/lock
379 beaker.session.lock_dir = %(here)s/data/sessions/lock
393
380
394 ## Secure encrypted cookie. Requires AES and AES python libraries
381 ## Secure encrypted cookie. Requires AES and AES python libraries
395 ## you must disable beaker.session.secret to use this
382 ## you must disable beaker.session.secret to use this
396 #beaker.session.encrypt_key = key_for_encryption
383 #beaker.session.encrypt_key = key_for_encryption
397 #beaker.session.validate_key = validation_key
384 #beaker.session.validate_key = validation_key
398
385
399 ## sets session as invalid (also logging out the user) if it has not been
386 ## sets session as invalid (also logging out the user) if it has not been
400 ## accessed for given amount of time in seconds
387 ## accessed for given amount of time in seconds
401 beaker.session.timeout = 2592000
388 beaker.session.timeout = 2592000
402 beaker.session.httponly = true
389 beaker.session.httponly = true
403 ## Path to use for the cookie. Set to prefix if you use prefix middleware
390 ## Path to use for the cookie. Set to prefix if you use prefix middleware
404 #beaker.session.cookie_path = /custom_prefix
391 #beaker.session.cookie_path = /custom_prefix
405
392
406 ## uncomment for https secure cookie
393 ## uncomment for https secure cookie
407 beaker.session.secure = false
394 beaker.session.secure = false
408
395
409 ## auto save the session, so you do not have to call .save() explicitly
396 ## auto save the session, so you do not have to call .save() explicitly
410 beaker.session.auto = false
397 beaker.session.auto = false
411
398
412 ## default cookie expiration time in seconds, set to `true` to set expire
399 ## default cookie expiration time in seconds, set to `true` to set expire
413 ## at browser close
400 ## at browser close
414 #beaker.session.cookie_expires = 3600
401 #beaker.session.cookie_expires = 3600
415
402
416 ###################################
403 ###################################
417 ## SEARCH INDEXING CONFIGURATION ##
404 ## SEARCH INDEXING CONFIGURATION ##
418 ###################################
405 ###################################
419 ## Full text search indexer is available in rhodecode-tools under
406 ## Full text search indexer is available in rhodecode-tools under
420 ## `rhodecode-tools index` command
407 ## `rhodecode-tools index` command
421
408
422 ## WHOOSH Backend, doesn't require additional services to run
409 ## WHOOSH Backend, doesn't require additional services to run
423 ## it works well with a few dozen repos
410 ## it works well with a few dozen repos
424 search.module = rhodecode.lib.index.whoosh
411 search.module = rhodecode.lib.index.whoosh
425 search.location = %(here)s/data/index
412 search.location = %(here)s/data/index
426
413
427 ########################################
414 ########################################
428 ### CHANNELSTREAM CONFIG ####
415 ### CHANNELSTREAM CONFIG ####
429 ########################################
416 ########################################
430 ## channelstream enables persistent connections and live notification
417 ## channelstream enables persistent connections and live notification
431 ## in the system. It's also used by the chat system
418 ## in the system. It's also used by the chat system
432 channelstream.enabled = false
419 channelstream.enabled = false
433
420
434 ## server address for channelstream server on the backend
421 ## server address for channelstream server on the backend
435 channelstream.server = 127.0.0.1:9800
422 channelstream.server = 127.0.0.1:9800
436
423
437 ## location of the channelstream server from outside world
424 ## location of the channelstream server from outside world
438 ## use ws:// for http or wss:// for https. This address needs to be handled
425 ## use ws:// for http or wss:// for https. This address needs to be handled
439 ## by external HTTP server such as Nginx or Apache
426 ## by external HTTP server such as Nginx or Apache
440 ## see nginx/apache configuration examples in our docs
427 ## see nginx/apache configuration examples in our docs
441 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
428 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
442 channelstream.secret = secret
429 channelstream.secret = secret
443 channelstream.history.location = %(here)s/channelstream_history
430 channelstream.history.location = %(here)s/channelstream_history
444
431
445 ## Internal application path that Javascript uses to connect into.
432 ## Internal application path that Javascript uses to connect into.
446 ## If you use proxy-prefix the prefix should be added before /_channelstream
433 ## If you use proxy-prefix the prefix should be added before /_channelstream
447 channelstream.proxy_path = /_channelstream
434 channelstream.proxy_path = /_channelstream
448
435
449
436
450 ###################################
437 ###################################
451 ## APPENLIGHT CONFIG ##
438 ## APPENLIGHT CONFIG ##
452 ###################################
439 ###################################
453
440
454 ## Appenlight is tailored to work with RhodeCode, see
441 ## Appenlight is tailored to work with RhodeCode, see
455 ## http://appenlight.com for details how to obtain an account
442 ## http://appenlight.com for details how to obtain an account
456
443
457 ## appenlight integration enabled
444 ## appenlight integration enabled
458 appenlight = false
445 appenlight = false
459
446
460 appenlight.server_url = https://api.appenlight.com
447 appenlight.server_url = https://api.appenlight.com
461 appenlight.api_key = YOUR_API_KEY
448 appenlight.api_key = YOUR_API_KEY
462 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
449 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
463
450
464 # used for JS client
451 # used for JS client
465 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
452 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
466
453
467 ## TWEAK AMOUNT OF INFO SENT HERE
454 ## TWEAK AMOUNT OF INFO SENT HERE
468
455
469 ## enables 404 error logging (default False)
456 ## enables 404 error logging (default False)
470 appenlight.report_404 = false
457 appenlight.report_404 = false
471
458
472 ## time in seconds after request is considered being slow (default 1)
459 ## time in seconds after request is considered being slow (default 1)
473 appenlight.slow_request_time = 1
460 appenlight.slow_request_time = 1
474
461
475 ## record slow requests in application
462 ## record slow requests in application
476 ## (needs to be enabled for slow datastore recording and time tracking)
463 ## (needs to be enabled for slow datastore recording and time tracking)
477 appenlight.slow_requests = true
464 appenlight.slow_requests = true
478
465
479 ## enable hooking to application loggers
466 ## enable hooking to application loggers
480 appenlight.logging = true
467 appenlight.logging = true
481
468
482 ## minimum log level for log capture
469 ## minimum log level for log capture
483 appenlight.logging.level = WARNING
470 appenlight.logging.level = WARNING
484
471
485 ## send logs only from erroneous/slow requests
472 ## send logs only from erroneous/slow requests
486 ## (saves API quota for intensive logging)
473 ## (saves API quota for intensive logging)
487 appenlight.logging_on_error = false
474 appenlight.logging_on_error = false
488
475
489 ## list of additional keywords that should be grabbed from environ object
476 ## list of additional keywords that should be grabbed from environ object
490 ## can be string with comma separated list of words in lowercase
477 ## can be string with comma separated list of words in lowercase
491 ## (by default client will always send following info:
478 ## (by default client will always send following info:
492 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
479 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
493 ## start with HTTP*); this list can be extended with additional keywords here
480 ## start with HTTP*); this list can be extended with additional keywords here
494 appenlight.environ_keys_whitelist =
481 appenlight.environ_keys_whitelist =
495
482
496 ## list of keywords that should be blanked from request object
483 ## list of keywords that should be blanked from request object
497 ## can be string with comma separated list of words in lowercase
484 ## can be string with comma separated list of words in lowercase
498 ## (by default client will always blank keys that contain following words
485 ## (by default client will always blank keys that contain following words
499 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
486 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
500 ## this list can be extended with additional keywords set here
487 ## this list can be extended with additional keywords set here
501 appenlight.request_keys_blacklist =
488 appenlight.request_keys_blacklist =
502
489
503 ## list of namespaces that should be ignored when gathering log entries
490 ## list of namespaces that should be ignored when gathering log entries
504 ## can be string with comma separated list of namespaces
491 ## can be string with comma separated list of namespaces
505 ## (by default the client ignores own entries: appenlight_client.client)
492 ## (by default the client ignores own entries: appenlight_client.client)
506 appenlight.log_namespace_blacklist =
493 appenlight.log_namespace_blacklist =
507
494
508
495
509 ################################################################################
496 ################################################################################
510 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
497 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
511 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
498 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
512 ## execute malicious code after an exception is raised. ##
499 ## execute malicious code after an exception is raised. ##
513 ################################################################################
500 ################################################################################
514 #set debug = false
501 #set debug = false
515
502
516
503
517 ##############
504 ##############
518 ## STYLING ##
505 ## STYLING ##
519 ##############
506 ##############
520 debug_style = true
507 debug_style = true
521
508
522 ###########################################
509 ###########################################
523 ### MAIN RHODECODE DATABASE CONFIG ###
510 ### MAIN RHODECODE DATABASE CONFIG ###
524 ###########################################
511 ###########################################
525 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
512 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
526 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
513 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
527 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
514 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
528 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
515 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
529
516
530 # see sqlalchemy docs for other advanced settings
517 # see sqlalchemy docs for other advanced settings
531
518
532 ## print the sql statements to output
519 ## print the sql statements to output
533 sqlalchemy.db1.echo = false
520 sqlalchemy.db1.echo = false
534 ## recycle the connections after this amount of seconds
521 ## recycle the connections after this amount of seconds
535 sqlalchemy.db1.pool_recycle = 3600
522 sqlalchemy.db1.pool_recycle = 3600
536 sqlalchemy.db1.convert_unicode = true
523 sqlalchemy.db1.convert_unicode = true
537
524
538 ## the number of connections to keep open inside the connection pool.
525 ## the number of connections to keep open inside the connection pool.
539 ## 0 indicates no limit
526 ## 0 indicates no limit
540 #sqlalchemy.db1.pool_size = 5
527 #sqlalchemy.db1.pool_size = 5
541
528
542 ## the number of connections to allow in connection pool "overflow", that is
529 ## the number of connections to allow in connection pool "overflow", that is
543 ## connections that can be opened above and beyond the pool_size setting,
530 ## connections that can be opened above and beyond the pool_size setting,
544 ## which defaults to five.
531 ## which defaults to five.
545 #sqlalchemy.db1.max_overflow = 10
532 #sqlalchemy.db1.max_overflow = 10
546
533
547
534
548 ##################
535 ##################
549 ### VCS CONFIG ###
536 ### VCS CONFIG ###
550 ##################
537 ##################
551 vcs.server.enable = true
538 vcs.server.enable = true
552 vcs.server = localhost:9900
539 vcs.server = localhost:9900
553
540
554 ## Web server connectivity protocol, responsible for web based VCS operations
541 ## Web server connectivity protocol, responsible for web based VCS operations
555 ## Available protocols are:
542 ## Available protocols are:
556 ## `http` - use http-rpc backend (default)
543 ## `http` - use http-rpc backend (default)
557 vcs.server.protocol = http
544 vcs.server.protocol = http
558
545
559 ## Push/Pull operations protocol, available options are:
546 ## Push/Pull operations protocol, available options are:
560 ## `http` - use http-rpc backend (default)
547 ## `http` - use http-rpc backend (default)
561 ##
548 ##
562 vcs.scm_app_implementation = http
549 vcs.scm_app_implementation = http
563
550
564 ## Push/Pull operations hooks protocol, available options are:
551 ## Push/Pull operations hooks protocol, available options are:
565 ## `http` - use http-rpc backend (default)
552 ## `http` - use http-rpc backend (default)
566 vcs.hooks.protocol = http
553 vcs.hooks.protocol = http
567
554
568 vcs.server.log_level = debug
555 vcs.server.log_level = debug
569 ## Start VCSServer with this instance as a subprocess, useful for development
556 ## Start VCSServer with this instance as a subprocess, useful for development
570 vcs.start_server = true
557 vcs.start_server = true
571
558
572 ## List of enabled VCS backends, available options are:
559 ## List of enabled VCS backends, available options are:
573 ## `hg` - mercurial
560 ## `hg` - mercurial
574 ## `git` - git
561 ## `git` - git
575 ## `svn` - subversion
562 ## `svn` - subversion
576 vcs.backends = hg, git, svn
563 vcs.backends = hg, git, svn
577
564
578 vcs.connection_timeout = 3600
565 vcs.connection_timeout = 3600
579 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
566 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
580 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
567 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
581 #vcs.svn.compatible_version = pre-1.8-compatible
568 #vcs.svn.compatible_version = pre-1.8-compatible
582
569
583
570
584 ############################################################
571 ############################################################
585 ### Subversion proxy support (mod_dav_svn) ###
572 ### Subversion proxy support (mod_dav_svn) ###
586 ### Maps RhodeCode repo groups into SVN paths for Apache ###
573 ### Maps RhodeCode repo groups into SVN paths for Apache ###
587 ############################################################
574 ############################################################
588 ## Enable or disable the config file generation.
575 ## Enable or disable the config file generation.
589 svn.proxy.generate_config = false
576 svn.proxy.generate_config = false
590 ## Generate config file with `SVNListParentPath` set to `On`.
577 ## Generate config file with `SVNListParentPath` set to `On`.
591 svn.proxy.list_parent_path = true
578 svn.proxy.list_parent_path = true
592 ## Set location and file name of generated config file.
579 ## Set location and file name of generated config file.
593 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
580 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
594 ## alternative mod_dav config template. This needs to be a mako template
581 ## alternative mod_dav config template. This needs to be a mako template
595 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
582 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
596 ## Used as a prefix to the `Location` block in the generated config file.
583 ## Used as a prefix to the `Location` block in the generated config file.
597 ## In most cases it should be set to `/`.
584 ## In most cases it should be set to `/`.
598 svn.proxy.location_root = /
585 svn.proxy.location_root = /
599 ## Command to reload the mod dav svn configuration on change.
586 ## Command to reload the mod dav svn configuration on change.
600 ## Example: `/etc/init.d/apache2 reload`
587 ## Example: `/etc/init.d/apache2 reload`
601 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
588 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
602 ## If the timeout expires before the reload command finishes, the command will
589 ## If the timeout expires before the reload command finishes, the command will
603 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
590 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
604 #svn.proxy.reload_timeout = 10
591 #svn.proxy.reload_timeout = 10
605
592
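The svn.proxy.reload_cmd / svn.proxy.reload_timeout pair above describes a command that gets killed once the timeout expires. A rough stdlib sketch of that behaviour (command and timeout are the example values from the comments, not RhodeCode's implementation):

    import subprocess

    try:
        # run() kills the child process and raises if the timeout is exceeded
        subprocess.run(['/etc/init.d/apache2', 'reload'], timeout=10, check=True)
    except subprocess.TimeoutExpired:
        print('mod_dav_svn reload timed out and was killed')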
606 ############################################################
593 ############################################################
607 ### SSH Support Settings ###
594 ### SSH Support Settings ###
608 ############################################################
595 ############################################################
609
596
610 ## Defines if a custom authorized_keys file should be created and written on
597 ## Defines if a custom authorized_keys file should be created and written on
611 ## any change of user SSH keys. Setting this to false also disables the possibility
598 ## any change of user SSH keys. Setting this to false also disables the possibility
612 ## of users adding SSH keys from the web interface. Super admins can still
599 ## of users adding SSH keys from the web interface. Super admins can still
613 ## manage SSH Keys.
600 ## manage SSH Keys.
614 ssh.generate_authorized_keyfile = false
601 ssh.generate_authorized_keyfile = false
615
602
616 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
603 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
617 # ssh.authorized_keys_ssh_opts =
604 # ssh.authorized_keys_ssh_opts =
618
605
619 ## Path to the authorized_keys file where the generated entries are placed.
606 ## Path to the authorized_keys file where the generated entries are placed.
620 ## It is possible to have multiple key files specified in `sshd_config` e.g.
607 ## It is possible to have multiple key files specified in `sshd_config` e.g.
621 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
608 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
622 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
609 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
623
610
624 ## Command to execute the SSH wrapper. The binary is available in the
611 ## Command to execute the SSH wrapper. The binary is available in the
625 ## rhodecode installation directory.
612 ## rhodecode installation directory.
626 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
613 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
627 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
614 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
628
615
629 ## Allow shell when executing the ssh-wrapper command
616 ## Allow shell when executing the ssh-wrapper command
630 ssh.wrapper_cmd_allow_shell = false
617 ssh.wrapper_cmd_allow_shell = false
631
618
632 ## Enables logging, and detailed output sent back to the client during SSH
619 ## Enables logging, and detailed output sent back to the client during SSH
633 ## operations. Useful for debugging, shouldn't be used in production.
620 ## operations. Useful for debugging, shouldn't be used in production.
634 ssh.enable_debug_logging = true
621 ssh.enable_debug_logging = true
635
622
636 ## Paths to the binary executables; by default these are just the binary names, but we can
623 ## Paths to the binary executables; by default these are just the binary names, but we can
637 ## override them if we want to use custom ones
624 ## override them if we want to use custom ones
638 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
625 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
639 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
626 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
640 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
627 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
641
628
642
629
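The ssh.* options above drive a generated authorized_keys entry that forces every connection through the SSH wrapper. A hedged sketch of how such a restricted entry is typically composed, using generic sshd syntax; the --user-id flag and the key are made up for illustration and are not necessarily the exact format RhodeCode writes:

    SSH_OPTS = 'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding'
    WRAPPER = '~/.rccontrol/community-1/rc-ssh-wrapper'

    def authorized_keys_entry(user_id, public_key):
        # command="..." makes sshd run the wrapper instead of a shell
        return 'command="{} --user-id={}",{} {}'.format(
            WRAPPER, user_id, SSH_OPTS, public_key)

    print(authorized_keys_entry(2, 'ssh-rsa AAAA... user@host'))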
643 ## Dummy marker to add new entries after.
630 ## Dummy marker to add new entries after.
644 ## Add any custom entries below. Please don't remove.
631 ## Add any custom entries below. Please don't remove.
645 custom.conf = 1
632 custom.conf = 1
646
633
647
634
648 ################################
635 ################################
649 ### LOGGING CONFIGURATION ####
636 ### LOGGING CONFIGURATION ####
650 ################################
637 ################################
651 [loggers]
638 [loggers]
652 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper
639 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
653
640
654 [handlers]
641 [handlers]
655 keys = console, console_sql
642 keys = console, console_sql
656
643
657 [formatters]
644 [formatters]
658 keys = generic, color_formatter, color_formatter_sql
645 keys = generic, color_formatter, color_formatter_sql
659
646
660 #############
647 #############
661 ## LOGGERS ##
648 ## LOGGERS ##
662 #############
649 #############
663 [logger_root]
650 [logger_root]
664 level = NOTSET
651 level = NOTSET
665 handlers = console
652 handlers = console
666
653
667 [logger_sqlalchemy]
654 [logger_sqlalchemy]
668 level = INFO
655 level = INFO
669 handlers = console_sql
656 handlers = console_sql
670 qualname = sqlalchemy.engine
657 qualname = sqlalchemy.engine
671 propagate = 0
658 propagate = 0
672
659
673 [logger_beaker]
660 [logger_beaker]
674 level = DEBUG
661 level = DEBUG
675 handlers =
662 handlers =
676 qualname = beaker.container
663 qualname = beaker.container
677 propagate = 1
664 propagate = 1
678
665
679 [logger_rhodecode]
666 [logger_rhodecode]
680 level = DEBUG
667 level = DEBUG
681 handlers =
668 handlers =
682 qualname = rhodecode
669 qualname = rhodecode
683 propagate = 1
670 propagate = 1
684
671
685 [logger_ssh_wrapper]
672 [logger_ssh_wrapper]
686 level = DEBUG
673 level = DEBUG
687 handlers =
674 handlers =
688 qualname = ssh_wrapper
675 qualname = ssh_wrapper
689 propagate = 1
676 propagate = 1
690
677
678 [logger_celery]
679 level = DEBUG
680 handlers =
681 qualname = celery
682
691
683
692 ##############
684 ##############
693 ## HANDLERS ##
685 ## HANDLERS ##
694 ##############
686 ##############
695
687
696 [handler_console]
688 [handler_console]
697 class = StreamHandler
689 class = StreamHandler
698 args = (sys.stderr, )
690 args = (sys.stderr, )
699 level = DEBUG
691 level = DEBUG
700 formatter = color_formatter
692 formatter = color_formatter
701
693
702 [handler_console_sql]
694 [handler_console_sql]
703 class = StreamHandler
695 class = StreamHandler
704 args = (sys.stderr, )
696 args = (sys.stderr, )
705 level = DEBUG
697 level = DEBUG
706 formatter = color_formatter_sql
698 formatter = color_formatter_sql
707
699
708 ################
700 ################
709 ## FORMATTERS ##
701 ## FORMATTERS ##
710 ################
702 ################
711
703
712 [formatter_generic]
704 [formatter_generic]
713 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
705 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
714 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
706 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
715 datefmt = %Y-%m-%d %H:%M:%S
707 datefmt = %Y-%m-%d %H:%M:%S
716
708
717 [formatter_color_formatter]
709 [formatter_color_formatter]
718 class = rhodecode.lib.logging_formatter.ColorFormatter
710 class = rhodecode.lib.logging_formatter.ColorFormatter
719 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
711 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
720 datefmt = %Y-%m-%d %H:%M:%S
712 datefmt = %Y-%m-%d %H:%M:%S
721
713
722 [formatter_color_formatter_sql]
714 [formatter_color_formatter_sql]
723 class = rhodecode.lib.logging_formatter.ColorFormatterSql
715 class = rhodecode.lib.logging_formatter.ColorFormatterSql
724 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
716 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
725 datefmt = %Y-%m-%d %H:%M:%S
717 datefmt = %Y-%m-%d %H:%M:%S
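The [loggers]/[handlers]/[formatters] sections above (including the newly added celery logger) follow the stdlib logging fileConfig layout, which is what the paster-style setup_logging ultimately feeds them to. A minimal sketch, with an illustrative file name and assuming the rhodecode formatter classes are importable:

    import logging
    import logging.config

    logging.config.fileConfig('rhodecode.ini', disable_existing_loggers=False)
    logging.getLogger('celery').debug('celery logger is now routed to the console handler')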
@@ -1,695 +1,687 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
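Since this is a PasteDeploy-style ini (which is where the %(here)s substitution comes from), its [app:main] settings can also be read programmatically with pyramid's helpers; a short sketch with an illustrative path and keys:

    from pyramid.paster import get_appsettings, setup_logging

    config_uri = 'rhodecode.ini'
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, name='main')
    print(settings.get('use_celery'), settings.get('celery.broker_url'))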
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all email subjects with the given prefix, helps filtering out emails
18 ## prefix all email subjects with the given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address from which all mails will be sent
21 ## email FROM address from which all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, send an error email from this address
28 ## in case of Application errors, send an error email from this address
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be sent in case of server crash
31 ## additional error message to be sent in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
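For reference, the smtp_* values above are the usual SMTP client knobs; a generic stdlib sketch of how such settings are consumed (this is not RhodeCode's mailer, and host/credentials are the placeholder values from the comments):

    import smtplib
    from email.message import EmailMessage

    msg = EmailMessage()
    msg['Subject'] = '[RhodeCode] test message'         # email_prefix
    msg['From'] = 'rhodecode-noreply@localhost'         # app_email_from
    msg['To'] = 'admin@localhost'                       # email_to
    msg.set_content('test')

    with smtplib.SMTP('mail.server.com', 587) as smtp:  # smtp_server / smtp_port
        smtp.starttls()                                 # smtp_use_tls = true
        smtp.login('smtp_username', 'smtp_password')
        smtp.send_message(msg)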
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 #use = egg:waitress#main
54 #use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 #threads = 5
56 #threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 #max_request_body_size = 107374182400
58 #max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptor limit problems.
59 ## Use poll instead of select, fixes file descriptor limit problems.
60 ## May not work on old windows systems.
60 ## May not work on old windows systems.
61 #asyncore_use_poll = true
61 #asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 use = egg:gunicorn#main
69 use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 workers = 2
74 workers = 2
75 ## number of threads for each worker, must be set to 1 for gevent
75 ## number of threads for each worker, must be set to 1 for gevent
76 ## generally recommended to be left at 1
76 ## generally recommended to be left at 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 proc_name = rhodecode
79 proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## for bigger setups it is recommended to use a worker class other than sync
81 ## for bigger setups it is recommended to use a worker class other than sync
82 worker_class = sync
82 worker_class = sync
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that a worker will handle before being gracefully
85 ## max number of requests that a worker will handle before being gracefully
86 ## restarted, which can help prevent memory leaks
86 ## restarted, which can help prevent memory leaks
87 max_requests = 1000
87 max_requests = 1000
88 max_requests_jitter = 30
88 max_requests_jitter = 30
89 ## amount of time a worker can spend handling a request before it
89 ## amount of time a worker can spend handling a request before it
90 ## gets killed and restarted. Set to 6hrs
90 ## gets killed and restarted. Set to 6hrs
91 timeout = 21600
91 timeout = 21600
92
92
93
93
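A quick illustration of the (2 * NUMBER_OF_CPUS + 1) rule of thumb quoted in the gunicorn section above (purely illustrative):

    import multiprocessing

    workers = 2 * multiprocessing.cpu_count() + 1
    print(workers)  # 5 on a 2-CPU machine, matching the "2CPU = 5 workers" example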
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
95 ## recommended when using proxy setup.
96 ## allows serving RhodeCode under a URL prefix on the server.
96 ## allows serving RhodeCode under a URL prefix on the server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on the prefix url
100 ## to make your cookies only work on the prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 ## encryption key used to encrypt social plugin tokens,
111 ## encryption key used to encrypt social plugin tokens,
112 ## remote_urls with credentials etc, if not set it defaults to
112 ## remote_urls with credentials etc, if not set it defaults to
113 ## `beaker.session.secret`
113 ## `beaker.session.secret`
114 #rhodecode.encrypted_values.secret =
114 #rhodecode.encrypted_values.secret =
115
115
116 ## decryption strict mode (enabled by default). It controls if decryption raises
116 ## decryption strict mode (enabled by default). It controls if decryption raises
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
118 #rhodecode.encrypted_values.strict = false
118 #rhodecode.encrypted_values.strict = false
119
119
120 ## return gzipped responses from Rhodecode (static files/application)
120 ## return gzipped responses from Rhodecode (static files/application)
121 gzip_responses = false
121 gzip_responses = false
122
122
123 ## autogenerate javascript routes file on startup
123 ## autogenerate javascript routes file on startup
124 generate_js_files = false
124 generate_js_files = false
125
125
126 ## Optional Languages
126 ## Optional Languages
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 lang = en
128 lang = en
129
129
130 ## perform a full repository scan on each server start, this should be
130 ## perform a full repository scan on each server start, this should be
131 ## set to false after first startup, to allow faster server restarts.
131 ## set to false after first startup, to allow faster server restarts.
132 startup.import_repos = false
132 startup.import_repos = false
133
133
134 ## Uncomment and set this path to use archive download cache.
134 ## Uncomment and set this path to use archive download cache.
135 ## Once enabled, generated archives will be cached at this location
135 ## Once enabled, generated archives will be cached at this location
136 ## and served from the cache during subsequent requests for the same archive of
136 ## and served from the cache during subsequent requests for the same archive of
137 ## the repository.
137 ## the repository.
138 #archive_cache_dir = /tmp/tarballcache
138 #archive_cache_dir = /tmp/tarballcache
139
139
140 ## URL at which the application is running. This is used for bootstrapping
140 ## URL at which the application is running. This is used for bootstrapping
141 ## requests in context when no web request is available. Used in ishell, or
141 ## requests in context when no web request is available. Used in ishell, or
142 ## SSH calls. Set this for events to receive proper url for SSH calls.
142 ## SSH calls. Set this for events to receive proper url for SSH calls.
143 app.base_url = http://rhodecode.local
143 app.base_url = http://rhodecode.local
144
144
145 ## change this to unique ID for security
145 ## change this to unique ID for security
146 app_instance_uuid = rc-production
146 app_instance_uuid = rc-production
147
147
148 ## cut off limit for large diffs (size in bytes). If overall diff size on
148 ## cut off limit for large diffs (size in bytes). If overall diff size on
149 ## commit, or pull request exceeds this limit this diff will be displayed
149 ## commit, or pull request exceeds this limit this diff will be displayed
150 ## partially. E.g 512000 == 512Kb
150 ## partially. E.g 512000 == 512Kb
151 cut_off_limit_diff = 512000
151 cut_off_limit_diff = 512000
152
152
153 ## cut off limit for large files inside diffs (size in bytes). Each individual
153 ## cut off limit for large files inside diffs (size in bytes). Each individual
154 ## file inside diff which exceeds this limit will be displayed partially.
154 ## file inside diff which exceeds this limit will be displayed partially.
155 ## E.g 128000 == 128Kb
155 ## E.g 128000 == 128Kb
156 cut_off_limit_file = 128000
156 cut_off_limit_file = 128000
157
157
158 ## use cache version of scm repo everywhere
158 ## use cache version of scm repo everywhere
159 vcs_full_cache = true
159 vcs_full_cache = true
160
160
161 ## force https in RhodeCode, fixes https redirects, assumes it's always https
161 ## force https in RhodeCode, fixes https redirects, assumes it's always https
162 ## Normally this is controlled by proper http flags sent from http server
162 ## Normally this is controlled by proper http flags sent from http server
163 force_https = false
163 force_https = false
164
164
165 ## use Strict-Transport-Security headers
165 ## use Strict-Transport-Security headers
166 use_htsts = false
166 use_htsts = false
167
167
168 ## number of commits stats will parse on each iteration
168 ## number of commits stats will parse on each iteration
169 commit_parse_limit = 25
169 commit_parse_limit = 25
170
170
171 ## git rev filter option, --all is the default filter. If you need to
171 ## git rev filter option, --all is the default filter. If you need to
172 ## hide all refs in the changelog, switch this to --branches --tags
172 ## hide all refs in the changelog, switch this to --branches --tags
173 git_rev_filter = --branches --tags
173 git_rev_filter = --branches --tags
174
174
175 # Set to true if your repos are exposed using the dumb protocol
175 # Set to true if your repos are exposed using the dumb protocol
176 git_update_server_info = false
176 git_update_server_info = false
177
177
178 ## RSS/ATOM feed options
178 ## RSS/ATOM feed options
179 rss_cut_off_limit = 256000
179 rss_cut_off_limit = 256000
180 rss_items_per_page = 10
180 rss_items_per_page = 10
181 rss_include_diff = false
181 rss_include_diff = false
182
182
183 ## gist URL alias, used to create nicer urls for gist. This should be an
183 ## gist URL alias, used to create nicer urls for gist. This should be an
184 ## url that does rewrites to _admin/gists/{gistid}.
184 ## url that does rewrites to _admin/gists/{gistid}.
185 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
185 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
186 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
186 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
187 gist_alias_url =
187 gist_alias_url =
188
188
189 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
189 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
190 ## used for access.
190 ## used for access.
191 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
191 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
192 ## came from the logged in user who owns this authentication token.
192 ## came from the logged in user who owns this authentication token.
193 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
193 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
194 ## authentication token. Such a view would only be accessible when used together
194 ## authentication token. Such a view would only be accessible when used together
195 ## with this authentication token
195 ## with this authentication token
196 ##
196 ##
197 ## list of all views can be found under `/_admin/permissions/auth_token_access`
197 ## list of all views can be found under `/_admin/permissions/auth_token_access`
198 ## The list should be "," separated and on a single line.
198 ## The list should be "," separated and on a single line.
199 ##
199 ##
200 ## Most common views to enable:
200 ## Most common views to enable:
201 # RepoCommitsView:repo_commit_download
201 # RepoCommitsView:repo_commit_download
202 # RepoCommitsView:repo_commit_patch
202 # RepoCommitsView:repo_commit_patch
203 # RepoCommitsView:repo_commit_raw
203 # RepoCommitsView:repo_commit_raw
204 # RepoCommitsView:repo_commit_raw@TOKEN
204 # RepoCommitsView:repo_commit_raw@TOKEN
205 # RepoFilesView:repo_files_diff
205 # RepoFilesView:repo_files_diff
206 # RepoFilesView:repo_archivefile
206 # RepoFilesView:repo_archivefile
207 # RepoFilesView:repo_file_raw
207 # RepoFilesView:repo_file_raw
208 # GistView:*
208 # GistView:*
209 api_access_controllers_whitelist =
209 api_access_controllers_whitelist =
210
210
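As described above, a whitelisted view can be called with ?auth_token=TOKEN_HASH appended to the url. A hedged example using requests; the host, repository path and token are placeholders:

    import requests

    resp = requests.get(
        'https://rhodecode.example.com/myrepo/raw/tip/README.rst',
        params={'auth_token': 'TOKEN_HASH'},  # authenticates as the token owner
    )
    resp.raise_for_status()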
211 ## default encoding used to convert from and to unicode
211 ## default encoding used to convert from and to unicode
212 ## can also be a comma separated list of encodings in case of mixed encodings
212 ## can also be a comma separated list of encodings in case of mixed encodings
213 default_encoding = UTF-8
213 default_encoding = UTF-8
214
214
215 ## instance-id prefix
215 ## instance-id prefix
216 ## a prefix key for this instance used for cache invalidation when running
216 ## a prefix key for this instance used for cache invalidation when running
217 ## multiple instances of rhodecode, make sure it's globally unique for
217 ## multiple instances of rhodecode, make sure it's globally unique for
218 ## all running rhodecode instances. Leave empty if you don't use it
218 ## all running rhodecode instances. Leave empty if you don't use it
219 instance_id =
219 instance_id =
220
220
221 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
221 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
222 ## of an authentication plugin even if it is disabled by its settings.
222 ## of an authentication plugin even if it is disabled by its settings.
223 ## This could be useful if you are unable to log in to the system due to broken
223 ## This could be useful if you are unable to log in to the system due to broken
224 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
224 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
225 ## module to log in again and fix the settings.
225 ## module to log in again and fix the settings.
226 ##
226 ##
227 ## Available builtin plugin IDs (hash is part of the ID):
227 ## Available builtin plugin IDs (hash is part of the ID):
228 ## egg:rhodecode-enterprise-ce#rhodecode
228 ## egg:rhodecode-enterprise-ce#rhodecode
229 ## egg:rhodecode-enterprise-ce#pam
229 ## egg:rhodecode-enterprise-ce#pam
230 ## egg:rhodecode-enterprise-ce#ldap
230 ## egg:rhodecode-enterprise-ce#ldap
231 ## egg:rhodecode-enterprise-ce#jasig_cas
231 ## egg:rhodecode-enterprise-ce#jasig_cas
232 ## egg:rhodecode-enterprise-ce#headers
232 ## egg:rhodecode-enterprise-ce#headers
233 ## egg:rhodecode-enterprise-ce#crowd
233 ## egg:rhodecode-enterprise-ce#crowd
234 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
234 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
235
235
236 ## alternative HTTP response code to return for failed authentication. The default HTTP
236 ## alternative HTTP response code to return for failed authentication. The default HTTP
237 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
237 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
238 ## handling that, causing a series of failed authentication calls.
238 ## handling that, causing a series of failed authentication calls.
239 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
239 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
240 ## This will be served instead of the default 401 on bad authentication
240 ## This will be served instead of the default 401 on bad authentication
241 auth_ret_code =
241 auth_ret_code =
242
242
243 ## use special detection method when serving auth_ret_code, instead of serving
243 ## use special detection method when serving auth_ret_code, instead of serving
244 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
244 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
245 ## and then serve auth_ret_code to clients
245 ## and then serve auth_ret_code to clients
246 auth_ret_code_detection = false
246 auth_ret_code_detection = false
247
247
248 ## locking return code. When repository is locked return this HTTP code. 2XX
248 ## locking return code. When repository is locked return this HTTP code. 2XX
249 ## codes don't break the transactions while 4XX codes do
249 ## codes don't break the transactions while 4XX codes do
250 lock_ret_code = 423
250 lock_ret_code = 423
251
251
252 ## allows changing the repository location in the settings page
252 ## allows changing the repository location in the settings page
253 allow_repo_location_change = true
253 allow_repo_location_change = true
254
254
255 ## allows setting up custom hooks in the settings page
255 ## allows setting up custom hooks in the settings page
256 allow_custom_hooks_settings = true
256 allow_custom_hooks_settings = true
257
257
258 ## generated license token, go to the license page in RhodeCode settings to obtain
258 ## generated license token, go to the license page in RhodeCode settings to obtain
259 ## new token
259 ## new token
260 license_token =
260 license_token =
261
261
262 ## supervisor connection uri, for managing supervisor and logs.
262 ## supervisor connection uri, for managing supervisor and logs.
263 supervisor.uri =
263 supervisor.uri =
264 ## supervisord group name/id that this RC instance should handle
264 ## supervisord group name/id that this RC instance should handle
265 supervisor.group_id = prod
265 supervisor.group_id = prod
266
266
267 ## Display extended labs settings
267 ## Display extended labs settings
268 labs_settings_active = true
268 labs_settings_active = true
269
269
270 ####################################
270 ####################################
271 ### CELERY CONFIG ####
271 ### CELERY CONFIG ####
272 ####################################
272 ####################################
273 use_celery = false
273 use_celery = false
274 broker.host = localhost
275 broker.vhost = rabbitmqhost
276 broker.port = 5672
277 broker.user = rabbitmq
278 broker.password = qweqwe
279
280 celery.imports = rhodecode.lib.celerylib.tasks
281
274
282 celery.result.backend = amqp
275 # connection url to the message broker (default rabbitmq)
283 celery.result.dburi = amqp://
276 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
284 celery.result.serialier = json
285
277
286 #celery.send.task.error.emails = true
278 # maximum tasks to execute before worker restart
287 #celery.amqp.task.result.expires = 18000
279 celery.max_tasks_per_child = 100
288
289 celeryd.concurrency = 2
290 #celeryd.log.file = celeryd.log
291 celeryd.log.level = debug
292 celeryd.max.tasks.per.child = 1
293
280
294 ## tasks will never be sent to the queue, but executed locally instead.
281 ## tasks will never be sent to the queue, but executed locally instead.
295 celery.always.eager = false
282 celery.task_always_eager = false
296
283
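The hunk above replaces the old broker.*/celeryd.* keys with Celery 4.x style lowercase settings (celery.broker_url, celery.max_tasks_per_child, celery.task_always_eager). A rough sketch, not RhodeCode's actual loader, of how such keys line up with Celery 4 configuration; the value conversions are assumptions:

    from celery import Celery

    ini = {
        'celery.broker_url': 'amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost',
        'celery.max_tasks_per_child': '100',
        'celery.task_always_eager': 'false',
    }

    app = Celery()
    app.conf.update(
        broker_url=ini['celery.broker_url'],
        worker_max_tasks_per_child=int(ini['celery.max_tasks_per_child']),
        task_always_eager=ini['celery.task_always_eager'].strip().lower() == 'true',
    )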
297 ####################################
284 ####################################
298 ### BEAKER CACHE ####
285 ### BEAKER CACHE ####
299 ####################################
286 ####################################
300 # default cache dir for templates. Putting this into a ramdisk
287 # default cache dir for templates. Putting this into a ramdisk
301 ## can boost performance, eg. %(here)s/data_ramdisk
288 ## can boost performance, eg. %(here)s/data_ramdisk
302 cache_dir = %(here)s/data
289 cache_dir = %(here)s/data
303
290
304 ## locking and default file storage for Beaker. Putting this into a ramdisk
291 ## locking and default file storage for Beaker. Putting this into a ramdisk
305 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
292 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
306 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
293 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
307 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
294 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
308
295
309 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
296 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
310
297
311 beaker.cache.super_short_term.type = memory
298 beaker.cache.super_short_term.type = memory
312 beaker.cache.super_short_term.expire = 10
299 beaker.cache.super_short_term.expire = 10
313 beaker.cache.super_short_term.key_length = 256
300 beaker.cache.super_short_term.key_length = 256
314
301
315 beaker.cache.short_term.type = memory
302 beaker.cache.short_term.type = memory
316 beaker.cache.short_term.expire = 60
303 beaker.cache.short_term.expire = 60
317 beaker.cache.short_term.key_length = 256
304 beaker.cache.short_term.key_length = 256
318
305
319 beaker.cache.long_term.type = memory
306 beaker.cache.long_term.type = memory
320 beaker.cache.long_term.expire = 36000
307 beaker.cache.long_term.expire = 36000
321 beaker.cache.long_term.key_length = 256
308 beaker.cache.long_term.key_length = 256
322
309
323 beaker.cache.sql_cache_short.type = memory
310 beaker.cache.sql_cache_short.type = memory
324 beaker.cache.sql_cache_short.expire = 10
311 beaker.cache.sql_cache_short.expire = 10
325 beaker.cache.sql_cache_short.key_length = 256
312 beaker.cache.sql_cache_short.key_length = 256
326
313
327 ## default is memory cache, configure only if required
314 ## default is memory cache, configure only if required
328 ## using multi-node or multi-worker setup
315 ## using multi-node or multi-worker setup
329 #beaker.cache.auth_plugins.type = ext:database
316 #beaker.cache.auth_plugins.type = ext:database
330 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
317 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
331 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
318 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
332 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
319 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
333 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
320 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
334 #beaker.cache.auth_plugins.sa.pool_size = 10
321 #beaker.cache.auth_plugins.sa.pool_size = 10
335 #beaker.cache.auth_plugins.sa.max_overflow = 0
322 #beaker.cache.auth_plugins.sa.max_overflow = 0
336
323
337 beaker.cache.repo_cache_long.type = memorylru_base
324 beaker.cache.repo_cache_long.type = memorylru_base
338 beaker.cache.repo_cache_long.max_items = 4096
325 beaker.cache.repo_cache_long.max_items = 4096
339 beaker.cache.repo_cache_long.expire = 2592000
326 beaker.cache.repo_cache_long.expire = 2592000
340
327
341 ## default is memorylru_base cache, configure only if required
328 ## default is memorylru_base cache, configure only if required
342 ## using multi-node or multi-worker setup
329 ## using multi-node or multi-worker setup
343 #beaker.cache.repo_cache_long.type = ext:memcached
330 #beaker.cache.repo_cache_long.type = ext:memcached
344 #beaker.cache.repo_cache_long.url = localhost:11211
331 #beaker.cache.repo_cache_long.url = localhost:11211
345 #beaker.cache.repo_cache_long.expire = 1209600
332 #beaker.cache.repo_cache_long.expire = 1209600
346 #beaker.cache.repo_cache_long.key_length = 256
333 #beaker.cache.repo_cache_long.key_length = 256
347
334
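The beaker.cache.* regions above are plain Beaker cache regions; a minimal sketch of how such options are commonly wired up with Beaker's own helpers (the framework layer normally strips the leading beaker. prefix, and the values here are just the examples from this section):

    from beaker.cache import CacheManager
    from beaker.util import parse_cache_config_options

    options = {
        'cache.regions': 'super_short_term, sql_cache_short',
        'cache.super_short_term.type': 'memory',
        'cache.super_short_term.expire': '10',
        'cache.sql_cache_short.type': 'memory',
        'cache.sql_cache_short.expire': '10',
    }
    cache = CacheManager(**parse_cache_config_options(options))

    @cache.region('super_short_term', 'demo')
    def expensive(x):
        # the decorated call is memoized in the named region
        return x * 2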
348 ####################################
335 ####################################
349 ### BEAKER SESSION ####
336 ### BEAKER SESSION ####
350 ####################################
337 ####################################
351
338
352 ## .session.type is the type of storage used for the session; currently allowed
339 ## .session.type is the type of storage used for the session; currently allowed
353 ## types are file, ext:memcached, ext:database, and memory (default).
340 ## types are file, ext:memcached, ext:database, and memory (default).
354 beaker.session.type = file
341 beaker.session.type = file
355 beaker.session.data_dir = %(here)s/data/sessions/data
342 beaker.session.data_dir = %(here)s/data/sessions/data
356
343
357 ## db based session, fast, and allows easy management of logged in users
344 ## db based session, fast, and allows easy management of logged in users
358 #beaker.session.type = ext:database
345 #beaker.session.type = ext:database
359 #beaker.session.table_name = db_session
346 #beaker.session.table_name = db_session
360 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
347 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
361 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
348 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
362 #beaker.session.sa.pool_recycle = 3600
349 #beaker.session.sa.pool_recycle = 3600
363 #beaker.session.sa.echo = false
350 #beaker.session.sa.echo = false
364
351
365 beaker.session.key = rhodecode
352 beaker.session.key = rhodecode
366 beaker.session.secret = production-rc-uytcxaz
353 beaker.session.secret = production-rc-uytcxaz
367 beaker.session.lock_dir = %(here)s/data/sessions/lock
354 beaker.session.lock_dir = %(here)s/data/sessions/lock
368
355
369 ## Secure encrypted cookie. Requires AES and AES python libraries
356 ## Secure encrypted cookie. Requires AES and AES python libraries
370 ## you must disable beaker.session.secret to use this
357 ## you must disable beaker.session.secret to use this
371 #beaker.session.encrypt_key = key_for_encryption
358 #beaker.session.encrypt_key = key_for_encryption
372 #beaker.session.validate_key = validation_key
359 #beaker.session.validate_key = validation_key
373
360
374 ## sets session as invalid (also logging out the user) if it has not been
361 ## sets session as invalid (also logging out the user) if it has not been
375 ## accessed for the given amount of time in seconds
362 ## accessed for the given amount of time in seconds
376 beaker.session.timeout = 2592000
363 beaker.session.timeout = 2592000
377 beaker.session.httponly = true
364 beaker.session.httponly = true
378 ## Path to use for the cookie. Set to prefix if you use prefix middleware
365 ## Path to use for the cookie. Set to prefix if you use prefix middleware
379 #beaker.session.cookie_path = /custom_prefix
366 #beaker.session.cookie_path = /custom_prefix
380
367
381 ## uncomment for https secure cookie
368 ## uncomment for https secure cookie
382 beaker.session.secure = false
369 beaker.session.secure = false
383
370
384 ## auto save the session so you do not have to call .save()
371 ## auto save the session so you do not have to call .save()
385 beaker.session.auto = false
372 beaker.session.auto = false
386
373
387 ## default cookie expiration time in seconds, set to `true` to set expire
374 ## default cookie expiration time in seconds, set to `true` to set expire
388 ## at browser close
375 ## at browser close
389 #beaker.session.cookie_expires = 3600
376 #beaker.session.cookie_expires = 3600
390
377
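Likewise, the beaker.session.* keys above are standard Beaker session options; a self-contained sketch of the usual SessionMiddleware wiring (the inner WSGI app is a placeholder and the option values are the ones shown above):

    from beaker.middleware import SessionMiddleware

    session_opts = {
        'session.type': 'file',
        'session.data_dir': './data/sessions/data',
        'session.lock_dir': './data/sessions/lock',
        'session.key': 'rhodecode',
        'session.secret': 'production-rc-uytcxaz',
        'session.timeout': 2592000,
        'session.httponly': True,
    }

    def demo_app(environ, start_response):
        session = environ['beaker.session']
        session['seen'] = True
        session.save()
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'ok']

    app = SessionMiddleware(demo_app, session_opts)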
391 ###################################
378 ###################################
392 ## SEARCH INDEXING CONFIGURATION ##
379 ## SEARCH INDEXING CONFIGURATION ##
393 ###################################
380 ###################################
394 ## Full text search indexer is available in rhodecode-tools under
381 ## Full text search indexer is available in rhodecode-tools under
395 ## `rhodecode-tools index` command
382 ## `rhodecode-tools index` command
396
383
397 ## WHOOSH Backend, doesn't require additional services to run
384 ## WHOOSH Backend, doesn't require additional services to run
398 ## it works well with a few dozen repos
385 ## it works well with a few dozen repos
399 search.module = rhodecode.lib.index.whoosh
386 search.module = rhodecode.lib.index.whoosh
400 search.location = %(here)s/data/index
387 search.location = %(here)s/data/index
401
388
402 ########################################
389 ########################################
403 ### CHANNELSTREAM CONFIG ####
390 ### CHANNELSTREAM CONFIG ####
404 ########################################
391 ########################################
405 ## channelstream enables persistent connections and live notifications
392 ## channelstream enables persistent connections and live notifications
406 ## in the system. It's also used by the chat system
393 ## in the system. It's also used by the chat system
407 channelstream.enabled = false
394 channelstream.enabled = false
408
395
409 ## server address for channelstream server on the backend
396 ## server address for channelstream server on the backend
410 channelstream.server = 127.0.0.1:9800
397 channelstream.server = 127.0.0.1:9800
411
398
412 ## location of the channelstream server from outside world
399 ## location of the channelstream server from outside world
413 ## use ws:// for http or wss:// for https. This address needs to be handled
400 ## use ws:// for http or wss:// for https. This address needs to be handled
414 ## by external HTTP server such as Nginx or Apache
401 ## by external HTTP server such as Nginx or Apache
415 ## see nginx/apache configuration examples in our docs
402 ## see nginx/apache configuration examples in our docs
416 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
403 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
417 channelstream.secret = secret
404 channelstream.secret = secret
418 channelstream.history.location = %(here)s/channelstream_history
405 channelstream.history.location = %(here)s/channelstream_history
419
406
420 ## Internal application path that Javascript uses to connect to.
407 ## Internal application path that Javascript uses to connect to.
421 ## If you use proxy-prefix the prefix should be added before /_channelstream
408 ## If you use proxy-prefix the prefix should be added before /_channelstream
422 channelstream.proxy_path = /_channelstream
409 channelstream.proxy_path = /_channelstream
423
410
424
411
425 ###################################
412 ###################################
426 ## APPENLIGHT CONFIG ##
413 ## APPENLIGHT CONFIG ##
427 ###################################
414 ###################################
428
415
429 ## Appenlight is tailored to work with RhodeCode, see
416 ## Appenlight is tailored to work with RhodeCode, see
430 ## http://appenlight.com for details on how to obtain an account
417 ## http://appenlight.com for details on how to obtain an account
431
418
432 ## appenlight integration enabled
419 ## appenlight integration enabled
433 appenlight = false
420 appenlight = false
434
421
435 appenlight.server_url = https://api.appenlight.com
422 appenlight.server_url = https://api.appenlight.com
436 appenlight.api_key = YOUR_API_KEY
423 appenlight.api_key = YOUR_API_KEY
437 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
424 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
438
425
439 # used for JS client
426 # used for JS client
440 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
427 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
441
428
442 ## TWEAK AMOUNT OF INFO SENT HERE
429 ## TWEAK AMOUNT OF INFO SENT HERE
443
430
444 ## enables 404 error logging (default False)
431 ## enables 404 error logging (default False)
445 appenlight.report_404 = false
432 appenlight.report_404 = false
446
433
447 ## time in seconds after which a request is considered slow (default 1)
434 ## time in seconds after which a request is considered slow (default 1)
448 appenlight.slow_request_time = 1
435 appenlight.slow_request_time = 1
449
436
450 ## record slow requests in application
437 ## record slow requests in application
451 ## (needs to be enabled for slow datastore recording and time tracking)
438 ## (needs to be enabled for slow datastore recording and time tracking)
452 appenlight.slow_requests = true
439 appenlight.slow_requests = true
453
440
454 ## enable hooking to application loggers
441 ## enable hooking to application loggers
455 appenlight.logging = true
442 appenlight.logging = true
456
443
457 ## minimum log level for log capture
444 ## minimum log level for log capture
458 appenlight.logging.level = WARNING
445 appenlight.logging.level = WARNING
459
446
460 ## send logs only from erroneous/slow requests
447 ## send logs only from erroneous/slow requests
461 ## (saves API quota for intensive logging)
448 ## (saves API quota for intensive logging)
462 appenlight.logging_on_error = false
449 appenlight.logging_on_error = false
463
450
464 ## list of additional keywords that should be grabbed from environ object
451 ## list of additional keywords that should be grabbed from environ object
465 ## can be string with comma separated list of words in lowercase
452 ## can be string with comma separated list of words in lowercase
466 ## (by default client will always send following info:
453 ## (by default client will always send following info:
467 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
454 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
468 ## start with HTTP*); this list can be extended with additional keywords here
455 ## start with HTTP*); this list can be extended with additional keywords here
469 appenlight.environ_keys_whitelist =
456 appenlight.environ_keys_whitelist =
470
457
471 ## list of keywords that should be blanked from request object
458 ## list of keywords that should be blanked from request object
472 ## can be string with comma separated list of words in lowercase
459 ## can be string with comma separated list of words in lowercase
473 ## (by default client will always blank keys that contain following words
460 ## (by default client will always blank keys that contain following words
474 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
461 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
475 ## this list can be extended with additional keywords set here
462 ## this list can be extended with additional keywords set here
476 appenlight.request_keys_blacklist =
463 appenlight.request_keys_blacklist =
477
464
478 ## list of namespaces that should be ignored when gathering log entries
465 ## list of namespaces that should be ignored when gathering log entries
479 ## can be string with comma separated list of namespaces
466 ## can be string with comma separated list of namespaces
480 ## (by default the client ignores own entries: appenlight_client.client)
467 ## (by default the client ignores own entries: appenlight_client.client)
481 appenlight.log_namespace_blacklist =
468 appenlight.log_namespace_blacklist =
482
469
483
470
484 ################################################################################
471 ################################################################################
485 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
472 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
486 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
473 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
487 ## execute malicious code after an exception is raised. ##
474 ## execute malicious code after an exception is raised. ##
488 ################################################################################
475 ################################################################################
489 set debug = false
476 set debug = false
490
477
491
478
492 ###########################################
479 ###########################################
493 ### MAIN RHODECODE DATABASE CONFIG ###
480 ### MAIN RHODECODE DATABASE CONFIG ###
494 ###########################################
481 ###########################################
495 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
482 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
496 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
483 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
497 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
484 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
498 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
485 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
499
486
500 # see sqlalchemy docs for other advanced settings
487 # see sqlalchemy docs for other advanced settings
501
488
502 ## print the sql statements to output
489 ## print the sql statements to output
503 sqlalchemy.db1.echo = false
490 sqlalchemy.db1.echo = false
504 ## recycle the connections after this amount of seconds
491 ## recycle the connections after this amount of seconds
505 sqlalchemy.db1.pool_recycle = 3600
492 sqlalchemy.db1.pool_recycle = 3600
506 sqlalchemy.db1.convert_unicode = true
493 sqlalchemy.db1.convert_unicode = true
507
494
508 ## the number of connections to keep open inside the connection pool.
495 ## the number of connections to keep open inside the connection pool.
509 ## 0 indicates no limit
496 ## 0 indicates no limit
510 #sqlalchemy.db1.pool_size = 5
497 #sqlalchemy.db1.pool_size = 5
511
498
512 ## the number of connections to allow in connection pool "overflow", that is
499 ## the number of connections to allow in connection pool "overflow", that is
513 ## connections that can be opened above and beyond the pool_size setting,
500 ## connections that can be opened above and beyond the pool_size setting,
514 ## which defaults to five.
501 ## which defaults to five.
515 #sqlalchemy.db1.max_overflow = 10
502 #sqlalchemy.db1.max_overflow = 10
516
503
517
504
518 ##################
505 ##################
519 ### VCS CONFIG ###
506 ### VCS CONFIG ###
520 ##################
507 ##################
521 vcs.server.enable = true
508 vcs.server.enable = true
522 vcs.server = localhost:9900
509 vcs.server = localhost:9900
523
510
524 ## Web server connectivity protocol, responsible for web based VCS operations
511 ## Web server connectivity protocol, responsible for web based VCS operations
525 ## Available protocols are:
512 ## Available protocols are:
526 ## `http` - use http-rpc backend (default)
513 ## `http` - use http-rpc backend (default)
527 vcs.server.protocol = http
514 vcs.server.protocol = http
528
515
529 ## Push/Pull operations protocol, available options are:
516 ## Push/Pull operations protocol, available options are:
530 ## `http` - use http-rpc backend (default)
517 ## `http` - use http-rpc backend (default)
531 ##
518 ##
532 vcs.scm_app_implementation = http
519 vcs.scm_app_implementation = http
533
520
534 ## Push/Pull operations hooks protocol, available options are:
521 ## Push/Pull operations hooks protocol, available options are:
535 ## `http` - use http-rpc backend (default)
522 ## `http` - use http-rpc backend (default)
536 vcs.hooks.protocol = http
523 vcs.hooks.protocol = http
537
524
538 vcs.server.log_level = info
525 vcs.server.log_level = info
539 ## Start VCSServer with this instance as a subprocess, useful for development
526 ## Start VCSServer with this instance as a subprocess, useful for development
540 vcs.start_server = false
527 vcs.start_server = false
541
528
542 ## List of enabled VCS backends, available options are:
529 ## List of enabled VCS backends, available options are:
543 ## `hg` - mercurial
530 ## `hg` - mercurial
544 ## `git` - git
531 ## `git` - git
545 ## `svn` - subversion
532 ## `svn` - subversion
546 vcs.backends = hg, git, svn
533 vcs.backends = hg, git, svn
547
534
548 vcs.connection_timeout = 3600
535 vcs.connection_timeout = 3600
549 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
536 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
550 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
537 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
551 #vcs.svn.compatible_version = pre-1.8-compatible
538 #vcs.svn.compatible_version = pre-1.8-compatible
552
539
553
540
554 ############################################################
541 ############################################################
555 ### Subversion proxy support (mod_dav_svn) ###
542 ### Subversion proxy support (mod_dav_svn) ###
556 ### Maps RhodeCode repo groups into SVN paths for Apache ###
543 ### Maps RhodeCode repo groups into SVN paths for Apache ###
557 ############################################################
544 ############################################################
558 ## Enable or disable the config file generation.
545 ## Enable or disable the config file generation.
559 svn.proxy.generate_config = false
546 svn.proxy.generate_config = false
560 ## Generate config file with `SVNListParentPath` set to `On`.
547 ## Generate config file with `SVNListParentPath` set to `On`.
561 svn.proxy.list_parent_path = true
548 svn.proxy.list_parent_path = true
562 ## Set location and file name of generated config file.
549 ## Set location and file name of generated config file.
563 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
550 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
564 ## alternative mod_dav config template. This needs to be a mako template
551 ## alternative mod_dav config template. This needs to be a mako template
565 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
552 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
566 ## Used as a prefix to the `Location` block in the generated config file.
553 ## Used as a prefix to the `Location` block in the generated config file.
567 ## In most cases it should be set to `/`.
554 ## In most cases it should be set to `/`.
568 svn.proxy.location_root = /
555 svn.proxy.location_root = /
569 ## Command to reload the mod dav svn configuration on change.
556 ## Command to reload the mod dav svn configuration on change.
570 ## Example: `/etc/init.d/apache2 reload`
557 ## Example: `/etc/init.d/apache2 reload`
571 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
558 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
572 ## If the timeout expires before the reload command finishes, the command will
559 ## If the timeout expires before the reload command finishes, the command will
573 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
560 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
574 #svn.proxy.reload_timeout = 10
561 #svn.proxy.reload_timeout = 10
575
562
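The reload settings above amount to running the configured command and killing it if it does not finish within `svn.proxy.reload_timeout` seconds (zero meaning no timeout). A minimal Python 3 sketch of that behaviour, for illustration only and not RhodeCode's actual implementation:

.. code-block:: python

    import shlex
    import subprocess

    def reload_mod_dav_svn(reload_cmd, timeout=10):
        """Run the reload command; kill it if the timeout expires."""
        try:
            # a timeout setting of 0 is treated as "no timeout"
            subprocess.run(shlex.split(reload_cmd), check=True,
                           timeout=timeout or None)
        except subprocess.TimeoutExpired:
            # subprocess.run() kills the child before re-raising
            print('reload command timed out and was killed')

    reload_mod_dav_svn('/etc/init.d/apache2 reload', timeout=10)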
576 ############################################################
563 ############################################################
577 ### SSH Support Settings ###
564 ### SSH Support Settings ###
578 ############################################################
565 ############################################################
579
566
580 ## Defines if a custom authorized_keys file should be created and written on
567 ## Defines if a custom authorized_keys file should be created and written on
581 ## any change of user SSH keys. Setting this to false also disables the possibility
568 ## any change of user SSH keys. Setting this to false also disables the possibility
582 ## of adding SSH keys by users from the web interface. Super admins can still
569 ## of adding SSH keys by users from the web interface. Super admins can still
583 ## manage SSH Keys.
570 ## manage SSH Keys.
584 ssh.generate_authorized_keyfile = false
571 ssh.generate_authorized_keyfile = false
585
572
586 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
573 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
587 # ssh.authorized_keys_ssh_opts =
574 # ssh.authorized_keys_ssh_opts =
588
575
589 ## Path to the authorized_keys file where the generated entries are placed.
576 ## Path to the authorized_keys file where the generated entries are placed.
590 ## It is possible to have multiple key files specified in `sshd_config` e.g.
577 ## It is possible to have multiple key files specified in `sshd_config` e.g.
591 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
578 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
592 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
579 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
593
580
594 ## Command to execute the SSH wrapper. The binary is available in the
581 ## Command to execute the SSH wrapper. The binary is available in the
595 ## rhodecode installation directory.
582 ## rhodecode installation directory.
596 ## e.g. ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
583 ## e.g. ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
597 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
584 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
598
585
599 ## Allow shell when executing the ssh-wrapper command
586 ## Allow shell when executing the ssh-wrapper command
600 ssh.wrapper_cmd_allow_shell = false
587 ssh.wrapper_cmd_allow_shell = false
601
588
602 ## Enables logging and detailed output sent back to the client during SSH
589 ## Enables logging and detailed output sent back to the client during SSH
603 ## operations. Useful for debugging; shouldn't be used in production.
590 ## operations. Useful for debugging; shouldn't be used in production.
604 ssh.enable_debug_logging = false
591 ssh.enable_debug_logging = false
605
592
606 ## Paths to binary executables; by default these are just the command names, but we can
593 ## Paths to binary executables; by default these are just the command names, but we can
607 ## override them if we want to use custom ones
594 ## override them if we want to use custom ones
608 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
595 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
609 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
596 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
610 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
597 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
611
598
612
599
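For reference, entries written to the file behind `ssh.authorized_keys_file_path` generally follow the standard forced-command authorized_keys form, combining `ssh.authorized_keys_ssh_opts` with `ssh.wrapper_cmd`. The sketch below only illustrates that general shape; the wrapper argument (`--user-id`) and the key material are made-up placeholders, not RhodeCode's exact output:

.. code-block:: python

    # Illustrative composition of a forced-command authorized_keys entry
    # from the ssh.* settings above; not RhodeCode's exact output format.
    ssh_opts = 'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding'
    wrapper_cmd = '~/.rccontrol/community-1/rc-ssh-wrapper'
    public_key = 'ssh-rsa AAAA... user@example.com'  # key uploaded by the user

    entry = '{opts},command="{cmd} --user-id={uid}" {key}'.format(
        opts=ssh_opts, cmd=wrapper_cmd, uid=42, key=public_key)
    print(entry)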
613 ## Dummy marker to add new entries after.
600 ## Dummy marker to add new entries after.
614 ## Add any custom entries below. Please don't remove.
601 ## Add any custom entries below. Please don't remove.
615 custom.conf = 1
602 custom.conf = 1
616
603
617
604
618 ################################
605 ################################
619 ### LOGGING CONFIGURATION ####
606 ### LOGGING CONFIGURATION ####
620 ################################
607 ################################
621 [loggers]
608 [loggers]
622 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper
609 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
623
610
624 [handlers]
611 [handlers]
625 keys = console, console_sql
612 keys = console, console_sql
626
613
627 [formatters]
614 [formatters]
628 keys = generic, color_formatter, color_formatter_sql
615 keys = generic, color_formatter, color_formatter_sql
629
616
630 #############
617 #############
631 ## LOGGERS ##
618 ## LOGGERS ##
632 #############
619 #############
633 [logger_root]
620 [logger_root]
634 level = NOTSET
621 level = NOTSET
635 handlers = console
622 handlers = console
636
623
637 [logger_sqlalchemy]
624 [logger_sqlalchemy]
638 level = INFO
625 level = INFO
639 handlers = console_sql
626 handlers = console_sql
640 qualname = sqlalchemy.engine
627 qualname = sqlalchemy.engine
641 propagate = 0
628 propagate = 0
642
629
643 [logger_beaker]
630 [logger_beaker]
644 level = DEBUG
631 level = DEBUG
645 handlers =
632 handlers =
646 qualname = beaker.container
633 qualname = beaker.container
647 propagate = 1
634 propagate = 1
648
635
649 [logger_rhodecode]
636 [logger_rhodecode]
650 level = DEBUG
637 level = DEBUG
651 handlers =
638 handlers =
652 qualname = rhodecode
639 qualname = rhodecode
653 propagate = 1
640 propagate = 1
654
641
655 [logger_ssh_wrapper]
642 [logger_ssh_wrapper]
656 level = DEBUG
643 level = DEBUG
657 handlers =
644 handlers =
658 qualname = ssh_wrapper
645 qualname = ssh_wrapper
659 propagate = 1
646 propagate = 1
660
647
648 [logger_celery]
649 level = DEBUG
650 handlers =
651 qualname = celery
652
661
653
662 ##############
654 ##############
663 ## HANDLERS ##
655 ## HANDLERS ##
664 ##############
656 ##############
665
657
666 [handler_console]
658 [handler_console]
667 class = StreamHandler
659 class = StreamHandler
668 args = (sys.stderr, )
660 args = (sys.stderr, )
669 level = INFO
661 level = INFO
670 formatter = generic
662 formatter = generic
671
663
672 [handler_console_sql]
664 [handler_console_sql]
673 class = StreamHandler
665 class = StreamHandler
674 args = (sys.stderr, )
666 args = (sys.stderr, )
675 level = WARN
667 level = WARN
676 formatter = generic
668 formatter = generic
677
669
678 ################
670 ################
679 ## FORMATTERS ##
671 ## FORMATTERS ##
680 ################
672 ################
681
673
682 [formatter_generic]
674 [formatter_generic]
683 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
675 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
684 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
676 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
685 datefmt = %Y-%m-%d %H:%M:%S
677 datefmt = %Y-%m-%d %H:%M:%S
686
678
687 [formatter_color_formatter]
679 [formatter_color_formatter]
688 class = rhodecode.lib.logging_formatter.ColorFormatter
680 class = rhodecode.lib.logging_formatter.ColorFormatter
689 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
681 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
690 datefmt = %Y-%m-%d %H:%M:%S
682 datefmt = %Y-%m-%d %H:%M:%S
691
683
692 [formatter_color_formatter_sql]
684 [formatter_color_formatter_sql]
693 class = rhodecode.lib.logging_formatter.ColorFormatterSql
685 class = rhodecode.lib.logging_formatter.ColorFormatterSql
694 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
686 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
695 datefmt = %Y-%m-%d %H:%M:%S
687 datefmt = %Y-%m-%d %H:%M:%S
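The logging sections above are standard `logging.config.fileConfig` syntax; the newly added `celery` logger defines no handlers of its own and simply propagates to the root `console` handler. A quick way to sanity-check the configuration (the ini path is a placeholder, and the custom formatter classes must be importable, i.e. rhodecode must be on the path):

.. code-block:: python

    import logging
    import logging.config

    # fileConfig only reads the [loggers]/[handlers]/[formatters] sections,
    # so it can be pointed at the full ini file (path is illustrative).
    logging.config.fileConfig('_dev/dev.ini', disable_existing_loggers=False)

    # celery.* records propagate to the root console handler; note that the
    # handler itself filters out anything below INFO.
    logging.getLogger('celery.rhodecode.loader').info('celery logging wired up')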
@@ -1,2067 +1,2063 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import time
22 import time
23
23
24 import rhodecode
24 import rhodecode
25 from rhodecode.api import (
25 from rhodecode.api import (
26 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
26 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
27 from rhodecode.api.utils import (
27 from rhodecode.api.utils import (
28 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
29 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
29 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
30 get_perm_or_error, parse_args, get_origin, build_commit_data,
30 get_perm_or_error, parse_args, get_origin, build_commit_data,
31 validate_set_owner_permissions)
31 validate_set_owner_permissions)
32 from rhodecode.lib import audit_logger
32 from rhodecode.lib import audit_logger
33 from rhodecode.lib import repo_maintenance
33 from rhodecode.lib import repo_maintenance
34 from rhodecode.lib.auth import HasPermissionAnyApi, HasUserGroupPermissionAnyApi
34 from rhodecode.lib.auth import HasPermissionAnyApi, HasUserGroupPermissionAnyApi
35 from rhodecode.lib.celerylib.utils import get_task_id
35 from rhodecode.lib.utils2 import str2bool, time_to_datetime
36 from rhodecode.lib.utils2 import str2bool, time_to_datetime
36 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
38 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
38 from rhodecode.model.changeset_status import ChangesetStatusModel
39 from rhodecode.model.changeset_status import ChangesetStatusModel
39 from rhodecode.model.comment import CommentsModel
40 from rhodecode.model.comment import CommentsModel
40 from rhodecode.model.db import (
41 from rhodecode.model.db import (
41 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
42 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
42 ChangesetComment)
43 ChangesetComment)
43 from rhodecode.model.repo import RepoModel
44 from rhodecode.model.repo import RepoModel
44 from rhodecode.model.scm import ScmModel, RepoList
45 from rhodecode.model.scm import ScmModel, RepoList
45 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
46 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
46 from rhodecode.model import validation_schema
47 from rhodecode.model import validation_schema
47 from rhodecode.model.validation_schema.schemas import repo_schema
48 from rhodecode.model.validation_schema.schemas import repo_schema
48
49
49 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
50
51
51
52
52 @jsonrpc_method()
53 @jsonrpc_method()
53 def get_repo(request, apiuser, repoid, cache=Optional(True)):
54 def get_repo(request, apiuser, repoid, cache=Optional(True)):
54 """
55 """
55 Gets an existing repository by its name or repository_id.
56 Gets an existing repository by its name or repository_id.
56
57
57 The members section of the output returns the user groups and users
58 The members section of the output returns the user groups and users
58 associated with that repository.
59 associated with that repository.
59
60
60 This command can only be run using an |authtoken| with admin rights,
61 This command can only be run using an |authtoken| with admin rights,
61 or users with at least read rights to the |repo|.
62 or users with at least read rights to the |repo|.
62
63
63 :param apiuser: This is filled automatically from the |authtoken|.
64 :param apiuser: This is filled automatically from the |authtoken|.
64 :type apiuser: AuthUser
65 :type apiuser: AuthUser
65 :param repoid: The repository name or repository id.
66 :param repoid: The repository name or repository id.
66 :type repoid: str or int
67 :type repoid: str or int
67 :param cache: use the cached value for last changeset
68 :param cache: use the cached value for last changeset
68 :type cache: Optional(bool)
69 :type cache: Optional(bool)
69
70
70 Example output:
71 Example output:
71
72
72 .. code-block:: bash
73 .. code-block:: bash
73
74
74 {
75 {
75 "error": null,
76 "error": null,
76 "id": <repo_id>,
77 "id": <repo_id>,
77 "result": {
78 "result": {
78 "clone_uri": null,
79 "clone_uri": null,
79 "created_on": "timestamp",
80 "created_on": "timestamp",
80 "description": "repo description",
81 "description": "repo description",
81 "enable_downloads": false,
82 "enable_downloads": false,
82 "enable_locking": false,
83 "enable_locking": false,
83 "enable_statistics": false,
84 "enable_statistics": false,
84 "followers": [
85 "followers": [
85 {
86 {
86 "active": true,
87 "active": true,
87 "admin": false,
88 "admin": false,
88 "api_key": "****************************************",
89 "api_key": "****************************************",
89 "api_keys": [
90 "api_keys": [
90 "****************************************"
91 "****************************************"
91 ],
92 ],
92 "email": "user@example.com",
93 "email": "user@example.com",
93 "emails": [
94 "emails": [
94 "user@example.com"
95 "user@example.com"
95 ],
96 ],
96 "extern_name": "rhodecode",
97 "extern_name": "rhodecode",
97 "extern_type": "rhodecode",
98 "extern_type": "rhodecode",
98 "firstname": "username",
99 "firstname": "username",
99 "ip_addresses": [],
100 "ip_addresses": [],
100 "language": null,
101 "language": null,
101 "last_login": "2015-09-16T17:16:35.854",
102 "last_login": "2015-09-16T17:16:35.854",
102 "lastname": "surname",
103 "lastname": "surname",
103 "user_id": <user_id>,
104 "user_id": <user_id>,
104 "username": "name"
105 "username": "name"
105 }
106 }
106 ],
107 ],
107 "fork_of": "parent-repo",
108 "fork_of": "parent-repo",
108 "landing_rev": [
109 "landing_rev": [
109 "rev",
110 "rev",
110 "tip"
111 "tip"
111 ],
112 ],
112 "last_changeset": {
113 "last_changeset": {
113 "author": "User <user@example.com>",
114 "author": "User <user@example.com>",
114 "branch": "default",
115 "branch": "default",
115 "date": "timestamp",
116 "date": "timestamp",
116 "message": "last commit message",
117 "message": "last commit message",
117 "parents": [
118 "parents": [
118 {
119 {
119 "raw_id": "commit-id"
120 "raw_id": "commit-id"
120 }
121 }
121 ],
122 ],
122 "raw_id": "commit-id",
123 "raw_id": "commit-id",
123 "revision": <revision number>,
124 "revision": <revision number>,
124 "short_id": "short id"
125 "short_id": "short id"
125 },
126 },
126 "lock_reason": null,
127 "lock_reason": null,
127 "locked_by": null,
128 "locked_by": null,
128 "locked_date": null,
129 "locked_date": null,
129 "members": [
130 "members": [
130 {
131 {
131 "name": "super-admin-name",
132 "name": "super-admin-name",
132 "origin": "super-admin",
133 "origin": "super-admin",
133 "permission": "repository.admin",
134 "permission": "repository.admin",
134 "type": "user"
135 "type": "user"
135 },
136 },
136 {
137 {
137 "name": "owner-name",
138 "name": "owner-name",
138 "origin": "owner",
139 "origin": "owner",
139 "permission": "repository.admin",
140 "permission": "repository.admin",
140 "type": "user"
141 "type": "user"
141 },
142 },
142 {
143 {
143 "name": "user-group-name",
144 "name": "user-group-name",
144 "origin": "permission",
145 "origin": "permission",
145 "permission": "repository.write",
146 "permission": "repository.write",
146 "type": "user_group"
147 "type": "user_group"
147 }
148 }
148 ],
149 ],
149 "owner": "owner-name",
150 "owner": "owner-name",
150 "permissions": [
151 "permissions": [
151 {
152 {
152 "name": "super-admin-name",
153 "name": "super-admin-name",
153 "origin": "super-admin",
154 "origin": "super-admin",
154 "permission": "repository.admin",
155 "permission": "repository.admin",
155 "type": "user"
156 "type": "user"
156 },
157 },
157 {
158 {
158 "name": "owner-name",
159 "name": "owner-name",
159 "origin": "owner",
160 "origin": "owner",
160 "permission": "repository.admin",
161 "permission": "repository.admin",
161 "type": "user"
162 "type": "user"
162 },
163 },
163 {
164 {
164 "name": "user-group-name",
165 "name": "user-group-name",
165 "origin": "permission",
166 "origin": "permission",
166 "permission": "repository.write",
167 "permission": "repository.write",
167 "type": "user_group"
168 "type": "user_group"
168 }
169 }
169 ],
170 ],
170 "private": true,
171 "private": true,
171 "repo_id": 676,
172 "repo_id": 676,
172 "repo_name": "user-group/repo-name",
173 "repo_name": "user-group/repo-name",
173 "repo_type": "hg"
174 "repo_type": "hg"
174 }
175 }
175 }
176 }
176 """
177 """
177
178
178 repo = get_repo_or_error(repoid)
179 repo = get_repo_or_error(repoid)
179 cache = Optional.extract(cache)
180 cache = Optional.extract(cache)
180
181
181 include_secrets = False
182 include_secrets = False
182 if has_superadmin_permission(apiuser):
183 if has_superadmin_permission(apiuser):
183 include_secrets = True
184 include_secrets = True
184 else:
185 else:
185 # check if we have at least read permission for this repo !
186 # check if we have at least read permission for this repo !
186 _perms = (
187 _perms = (
187 'repository.admin', 'repository.write', 'repository.read',)
188 'repository.admin', 'repository.write', 'repository.read',)
188 validate_repo_permissions(apiuser, repoid, repo, _perms)
189 validate_repo_permissions(apiuser, repoid, repo, _perms)
189
190
190 permissions = []
191 permissions = []
191 for _user in repo.permissions():
192 for _user in repo.permissions():
192 user_data = {
193 user_data = {
193 'name': _user.username,
194 'name': _user.username,
194 'permission': _user.permission,
195 'permission': _user.permission,
195 'origin': get_origin(_user),
196 'origin': get_origin(_user),
196 'type': "user",
197 'type': "user",
197 }
198 }
198 permissions.append(user_data)
199 permissions.append(user_data)
199
200
200 for _user_group in repo.permission_user_groups():
201 for _user_group in repo.permission_user_groups():
201 user_group_data = {
202 user_group_data = {
202 'name': _user_group.users_group_name,
203 'name': _user_group.users_group_name,
203 'permission': _user_group.permission,
204 'permission': _user_group.permission,
204 'origin': get_origin(_user_group),
205 'origin': get_origin(_user_group),
205 'type': "user_group",
206 'type': "user_group",
206 }
207 }
207 permissions.append(user_group_data)
208 permissions.append(user_group_data)
208
209
209 following_users = [
210 following_users = [
210 user.user.get_api_data(include_secrets=include_secrets)
211 user.user.get_api_data(include_secrets=include_secrets)
211 for user in repo.followers]
212 for user in repo.followers]
212
213
213 if not cache:
214 if not cache:
214 repo.update_commit_cache()
215 repo.update_commit_cache()
215 data = repo.get_api_data(include_secrets=include_secrets)
216 data = repo.get_api_data(include_secrets=include_secrets)
216 data['members'] = permissions # TODO: this should be deprecated soon
217 data['members'] = permissions # TODO: this should be deprecated soon
217 data['permissions'] = permissions
218 data['permissions'] = permissions
218 data['followers'] = following_users
219 data['followers'] = following_users
219 return data
220 return data
220
221
221
222
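Like the other API methods in this module, `get_repo` is reachable over JSON-RPC. A hedged sketch using `requests`; the server URL, auth token and repository name are placeholders, and the `/_admin/api` endpoint follows the usual RhodeCode API convention:

.. code-block:: python

    import requests

    payload = {
        'id': 1,
        'auth_token': '<token-with-at-least-read-rights>',
        'method': 'get_repo',
        'args': {'repoid': 'user-group/repo-name', 'cache': True},
    }
    response = requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload).json()
    print(response['result']['repo_name'], response['error'])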
222 @jsonrpc_method()
223 @jsonrpc_method()
223 def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
224 def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
224 """
225 """
225 Lists all existing repositories.
226 Lists all existing repositories.
226
227
227 This command can only be run using an |authtoken| with admin rights,
228 This command can only be run using an |authtoken| with admin rights,
228 or users with at least read rights to |repos|.
229 or users with at least read rights to |repos|.
229
230
230 :param apiuser: This is filled automatically from the |authtoken|.
231 :param apiuser: This is filled automatically from the |authtoken|.
231 :type apiuser: AuthUser
232 :type apiuser: AuthUser
232 :param root: Specify the root repository group to fetch repositories from.
233 :param root: Specify the root repository group to fetch repositories from.
233 Filters the returned repositories to be members of the given root group.
234 Filters the returned repositories to be members of the given root group.
234 :type root: Optional(None)
235 :type root: Optional(None)
235 :param traverse: traverse given root into subrepositories. With this flag
236 :param traverse: traverse given root into subrepositories. With this flag
236 set to False, it will only return top-level repositories from `root`.
237 set to False, it will only return top-level repositories from `root`.
237 If `root` is empty it will return just top-level repositories.
238 If `root` is empty it will return just top-level repositories.
238 :type traverse: Optional(True)
239 :type traverse: Optional(True)
239
240
240
241
241 Example output:
242 Example output:
242
243
243 .. code-block:: bash
244 .. code-block:: bash
244
245
245 id : <id_given_in_input>
246 id : <id_given_in_input>
246 result: [
247 result: [
247 {
248 {
248 "repo_id" : "<repo_id>",
249 "repo_id" : "<repo_id>",
249 "repo_name" : "<reponame>"
250 "repo_name" : "<reponame>"
250 "repo_type" : "<repo_type>",
251 "repo_type" : "<repo_type>",
251 "clone_uri" : "<clone_uri>",
252 "clone_uri" : "<clone_uri>",
252 "private": : "<bool>",
253 "private": : "<bool>",
253 "created_on" : "<datetimecreated>",
254 "created_on" : "<datetimecreated>",
254 "description" : "<description>",
255 "description" : "<description>",
255 "landing_rev": "<landing_rev>",
256 "landing_rev": "<landing_rev>",
256 "owner": "<repo_owner>",
257 "owner": "<repo_owner>",
257 "fork_of": "<name_of_fork_parent>",
258 "fork_of": "<name_of_fork_parent>",
258 "enable_downloads": "<bool>",
259 "enable_downloads": "<bool>",
259 "enable_locking": "<bool>",
260 "enable_locking": "<bool>",
260 "enable_statistics": "<bool>",
261 "enable_statistics": "<bool>",
261 },
262 },
262 ...
263 ...
263 ]
264 ]
264 error: null
265 error: null
265 """
266 """
266
267
267 include_secrets = has_superadmin_permission(apiuser)
268 include_secrets = has_superadmin_permission(apiuser)
268 _perms = ('repository.read', 'repository.write', 'repository.admin',)
269 _perms = ('repository.read', 'repository.write', 'repository.admin',)
269 extras = {'user': apiuser}
270 extras = {'user': apiuser}
270
271
271 root = Optional.extract(root)
272 root = Optional.extract(root)
272 traverse = Optional.extract(traverse, binary=True)
273 traverse = Optional.extract(traverse, binary=True)
273
274
274 if root:
275 if root:
275 # verify parent existence, if it's empty return an error
276 # verify parent existence, if it's empty return an error
276 parent = RepoGroup.get_by_group_name(root)
277 parent = RepoGroup.get_by_group_name(root)
277 if not parent:
278 if not parent:
278 raise JSONRPCError(
279 raise JSONRPCError(
279 'Root repository group `{}` does not exist'.format(root))
280 'Root repository group `{}` does not exist'.format(root))
280
281
281 if traverse:
282 if traverse:
282 repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
283 repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
283 else:
284 else:
284 repos = RepoModel().get_repos_for_root(root=parent)
285 repos = RepoModel().get_repos_for_root(root=parent)
285 else:
286 else:
286 if traverse:
287 if traverse:
287 repos = RepoModel().get_all()
288 repos = RepoModel().get_all()
288 else:
289 else:
289 # return just top-level
290 # return just top-level
290 repos = RepoModel().get_repos_for_root(root=None)
291 repos = RepoModel().get_repos_for_root(root=None)
291
292
292 repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
293 repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
293 return [repo.get_api_data(include_secrets=include_secrets)
294 return [repo.get_api_data(include_secrets=include_secrets)
294 for repo in repo_list]
295 for repo in repo_list]
295
296
296
297
297 @jsonrpc_method()
298 @jsonrpc_method()
298 def get_repo_changeset(request, apiuser, repoid, revision,
299 def get_repo_changeset(request, apiuser, repoid, revision,
299 details=Optional('basic')):
300 details=Optional('basic')):
300 """
301 """
301 Returns information about a changeset.
302 Returns information about a changeset.
302
303
303 Additional parameters define the amount of detail returned by
304 Additional parameters define the amount of detail returned by
304 this function.
305 this function.
305
306
306 This command can only be run using an |authtoken| with admin rights,
307 This command can only be run using an |authtoken| with admin rights,
307 or users with at least read rights to the |repo|.
308 or users with at least read rights to the |repo|.
308
309
309 :param apiuser: This is filled automatically from the |authtoken|.
310 :param apiuser: This is filled automatically from the |authtoken|.
310 :type apiuser: AuthUser
311 :type apiuser: AuthUser
311 :param repoid: The repository name or repository id
312 :param repoid: The repository name or repository id
312 :type repoid: str or int
313 :type repoid: str or int
313 :param revision: revision for which listing should be done
314 :param revision: revision for which listing should be done
314 :type revision: str
315 :type revision: str
315 :param details: details can be 'basic|extended|full'; 'full' gives diff
316 :param details: details can be 'basic|extended|full'; 'full' gives diff
316 info such as the diff itself and the number of changed files.
317 info such as the diff itself and the number of changed files.
317 :type details: Optional(str)
318 :type details: Optional(str)
318
319
319 """
320 """
320 repo = get_repo_or_error(repoid)
321 repo = get_repo_or_error(repoid)
321 if not has_superadmin_permission(apiuser):
322 if not has_superadmin_permission(apiuser):
322 _perms = (
323 _perms = (
323 'repository.admin', 'repository.write', 'repository.read',)
324 'repository.admin', 'repository.write', 'repository.read',)
324 validate_repo_permissions(apiuser, repoid, repo, _perms)
325 validate_repo_permissions(apiuser, repoid, repo, _perms)
325
326
326 changes_details = Optional.extract(details)
327 changes_details = Optional.extract(details)
327 _changes_details_types = ['basic', 'extended', 'full']
328 _changes_details_types = ['basic', 'extended', 'full']
328 if changes_details not in _changes_details_types:
329 if changes_details not in _changes_details_types:
329 raise JSONRPCError(
330 raise JSONRPCError(
330 'ret_type must be one of %s' % (
331 'ret_type must be one of %s' % (
331 ','.join(_changes_details_types)))
332 ','.join(_changes_details_types)))
332
333
333 pre_load = ['author', 'branch', 'date', 'message', 'parents',
334 pre_load = ['author', 'branch', 'date', 'message', 'parents',
334 'status', '_commit', '_file_paths']
335 'status', '_commit', '_file_paths']
335
336
336 try:
337 try:
337 cs = repo.get_commit(commit_id=revision, pre_load=pre_load)
338 cs = repo.get_commit(commit_id=revision, pre_load=pre_load)
338 except TypeError as e:
339 except TypeError as e:
339 raise JSONRPCError(e.message)
340 raise JSONRPCError(e.message)
340 _cs_json = cs.__json__()
341 _cs_json = cs.__json__()
341 _cs_json['diff'] = build_commit_data(cs, changes_details)
342 _cs_json['diff'] = build_commit_data(cs, changes_details)
342 if changes_details == 'full':
343 if changes_details == 'full':
343 _cs_json['refs'] = cs._get_refs()
344 _cs_json['refs'] = cs._get_refs()
344 return _cs_json
345 return _cs_json
345
346
346
347
347 @jsonrpc_method()
348 @jsonrpc_method()
348 def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
349 def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
349 details=Optional('basic')):
350 details=Optional('basic')):
350 """
351 """
351 Returns a set of commits limited by the number starting
352 Returns a set of commits limited by the number starting
352 from the `start_rev` option.
353 from the `start_rev` option.
353
354
354 Additional parameters define the amount of detail returned by this
355 Additional parameters define the amount of detail returned by this
355 function.
356 function.
356
357
357 This command can only be run using an |authtoken| with admin rights,
358 This command can only be run using an |authtoken| with admin rights,
358 or users with at least read rights to |repos|.
359 or users with at least read rights to |repos|.
359
360
360 :param apiuser: This is filled automatically from the |authtoken|.
361 :param apiuser: This is filled automatically from the |authtoken|.
361 :type apiuser: AuthUser
362 :type apiuser: AuthUser
362 :param repoid: The repository name or repository ID.
363 :param repoid: The repository name or repository ID.
363 :type repoid: str or int
364 :type repoid: str or int
364 :param start_rev: The starting revision from where to get changesets.
365 :param start_rev: The starting revision from where to get changesets.
365 :type start_rev: str
366 :type start_rev: str
366 :param limit: Limit the number of commits to this amount
367 :param limit: Limit the number of commits to this amount
367 :type limit: str or int
368 :type limit: str or int
368 :param details: Set the level of detail returned. Valid options are:
369 :param details: Set the level of detail returned. Valid options are:
369 ``basic``, ``extended`` and ``full``.
370 ``basic``, ``extended`` and ``full``.
370 :type details: Optional(str)
371 :type details: Optional(str)
371
372
372 .. note::
373 .. note::
373
374
374 Setting the parameter `details` to the value ``full`` is extensive
375 Setting the parameter `details` to the value ``full`` is extensive
375 and returns details like the diff itself, and the number
376 and returns details like the diff itself, and the number
376 of changed files.
377 of changed files.
377
378
378 """
379 """
379 repo = get_repo_or_error(repoid)
380 repo = get_repo_or_error(repoid)
380 if not has_superadmin_permission(apiuser):
381 if not has_superadmin_permission(apiuser):
381 _perms = (
382 _perms = (
382 'repository.admin', 'repository.write', 'repository.read',)
383 'repository.admin', 'repository.write', 'repository.read',)
383 validate_repo_permissions(apiuser, repoid, repo, _perms)
384 validate_repo_permissions(apiuser, repoid, repo, _perms)
384
385
385 changes_details = Optional.extract(details)
386 changes_details = Optional.extract(details)
386 _changes_details_types = ['basic', 'extended', 'full']
387 _changes_details_types = ['basic', 'extended', 'full']
387 if changes_details not in _changes_details_types:
388 if changes_details not in _changes_details_types:
388 raise JSONRPCError(
389 raise JSONRPCError(
389 'ret_type must be one of %s' % (
390 'ret_type must be one of %s' % (
390 ','.join(_changes_details_types)))
391 ','.join(_changes_details_types)))
391
392
392 limit = int(limit)
393 limit = int(limit)
393 pre_load = ['author', 'branch', 'date', 'message', 'parents',
394 pre_load = ['author', 'branch', 'date', 'message', 'parents',
394 'status', '_commit', '_file_paths']
395 'status', '_commit', '_file_paths']
395
396
396 vcs_repo = repo.scm_instance()
397 vcs_repo = repo.scm_instance()
397 # SVN needs a special case to distinguish its index and commit id
398 # SVN needs a special case to distinguish its index and commit id
398 if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
399 if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
399 start_rev = vcs_repo.commit_ids[0]
400 start_rev = vcs_repo.commit_ids[0]
400
401
401 try:
402 try:
402 commits = vcs_repo.get_commits(
403 commits = vcs_repo.get_commits(
403 start_id=start_rev, pre_load=pre_load)
404 start_id=start_rev, pre_load=pre_load)
404 except TypeError as e:
405 except TypeError as e:
405 raise JSONRPCError(e.message)
406 raise JSONRPCError(e.message)
406 except Exception:
407 except Exception:
407 log.exception('Fetching of commits failed')
408 log.exception('Fetching of commits failed')
408 raise JSONRPCError('Error occurred during commit fetching')
409 raise JSONRPCError('Error occurred during commit fetching')
409
410
410 ret = []
411 ret = []
411 for cnt, commit in enumerate(commits):
412 for cnt, commit in enumerate(commits):
412 if cnt >= limit != -1:
413 if cnt >= limit != -1:
413 break
414 break
414 _cs_json = commit.__json__()
415 _cs_json = commit.__json__()
415 _cs_json['diff'] = build_commit_data(commit, changes_details)
416 _cs_json['diff'] = build_commit_data(commit, changes_details)
416 if changes_details == 'full':
417 if changes_details == 'full':
417 _cs_json['refs'] = {
418 _cs_json['refs'] = {
418 'branches': [commit.branch],
419 'branches': [commit.branch],
419 'bookmarks': getattr(commit, 'bookmarks', []),
420 'bookmarks': getattr(commit, 'bookmarks', []),
420 'tags': commit.tags
421 'tags': commit.tags
421 }
422 }
422 ret.append(_cs_json)
423 ret.append(_cs_json)
423 return ret
424 return ret
424
425
425
426
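The loop guard `if cnt >= limit != -1` above uses Python's chained comparison: it is equivalent to `cnt >= limit and limit != -1`, so passing a limit of -1 disables truncation. A tiny illustration:

.. code-block:: python

    def should_stop(cnt, limit):
        # chained comparison, same as: cnt >= limit and limit != -1
        return cnt >= limit != -1

    assert should_stop(5, 5) is True    # limit reached, stop collecting
    assert should_stop(5, -1) is False  # -1 means "no limit"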
426 @jsonrpc_method()
427 @jsonrpc_method()
427 def get_repo_nodes(request, apiuser, repoid, revision, root_path,
428 def get_repo_nodes(request, apiuser, repoid, revision, root_path,
428 ret_type=Optional('all'), details=Optional('basic'),
429 ret_type=Optional('all'), details=Optional('basic'),
429 max_file_bytes=Optional(None)):
430 max_file_bytes=Optional(None)):
430 """
431 """
431 Returns a list of nodes and children in a flat list for a given
432 Returns a list of nodes and children in a flat list for a given
432 path at given revision.
433 path at given revision.
433
434
434 It's possible to specify ret_type to show only `files` or `dirs`.
435 It's possible to specify ret_type to show only `files` or `dirs`.
435
436
436 This command can only be run using an |authtoken| with admin rights,
437 This command can only be run using an |authtoken| with admin rights,
437 or users with at least read rights to |repos|.
438 or users with at least read rights to |repos|.
438
439
439 :param apiuser: This is filled automatically from the |authtoken|.
440 :param apiuser: This is filled automatically from the |authtoken|.
440 :type apiuser: AuthUser
441 :type apiuser: AuthUser
441 :param repoid: The repository name or repository ID.
442 :param repoid: The repository name or repository ID.
442 :type repoid: str or int
443 :type repoid: str or int
443 :param revision: The revision for which listing should be done.
444 :param revision: The revision for which listing should be done.
444 :type revision: str
445 :type revision: str
445 :param root_path: The path from which to start displaying.
446 :param root_path: The path from which to start displaying.
446 :type root_path: str
447 :type root_path: str
447 :param ret_type: Set the return type. Valid options are
448 :param ret_type: Set the return type. Valid options are
448 ``all`` (default), ``files`` and ``dirs``.
449 ``all`` (default), ``files`` and ``dirs``.
449 :type ret_type: Optional(str)
450 :type ret_type: Optional(str)
450 :param details: Returns extended information about nodes, such as
451 :param details: Returns extended information about nodes, such as
451 md5, binary, and/or content. The valid options are ``basic`` and
452 md5, binary, and/or content. The valid options are ``basic`` and
452 ``full``.
453 ``full``.
453 :type details: Optional(str)
454 :type details: Optional(str)
454 :param max_file_bytes: Only return file content for files smaller than this size, in bytes.
455 :param max_file_bytes: Only return file content for files smaller than this size, in bytes.
455 :type max_file_bytes: Optional(int)
456 :type max_file_bytes: Optional(int)
456
457
457 Example output:
458 Example output:
458
459
459 .. code-block:: bash
460 .. code-block:: bash
460
461
461 id : <id_given_in_input>
462 id : <id_given_in_input>
462 result: [
463 result: [
463 {
464 {
464 "name" : "<name>"
465 "name" : "<name>"
465 "type" : "<type>",
466 "type" : "<type>",
466 "binary": "<true|false>" (only in extended mode)
467 "binary": "<true|false>" (only in extended mode)
467 "md5" : "<md5 of file content>" (only in extended mode)
468 "md5" : "<md5 of file content>" (only in extended mode)
468 },
469 },
469 ...
470 ...
470 ]
471 ]
471 error: null
472 error: null
472 """
473 """
473
474
474 repo = get_repo_or_error(repoid)
475 repo = get_repo_or_error(repoid)
475 if not has_superadmin_permission(apiuser):
476 if not has_superadmin_permission(apiuser):
476 _perms = (
477 _perms = (
477 'repository.admin', 'repository.write', 'repository.read',)
478 'repository.admin', 'repository.write', 'repository.read',)
478 validate_repo_permissions(apiuser, repoid, repo, _perms)
479 validate_repo_permissions(apiuser, repoid, repo, _perms)
479
480
480 ret_type = Optional.extract(ret_type)
481 ret_type = Optional.extract(ret_type)
481 details = Optional.extract(details)
482 details = Optional.extract(details)
482 _extended_types = ['basic', 'full']
483 _extended_types = ['basic', 'full']
483 if details not in _extended_types:
484 if details not in _extended_types:
484 raise JSONRPCError(
485 raise JSONRPCError(
485 'ret_type must be one of %s' % (','.join(_extended_types)))
486 'ret_type must be one of %s' % (','.join(_extended_types)))
486 extended_info = False
487 extended_info = False
487 content = False
488 content = False
488 if details == 'basic':
489 if details == 'basic':
489 extended_info = True
490 extended_info = True
490
491
491 if details == 'full':
492 if details == 'full':
492 extended_info = content = True
493 extended_info = content = True
493
494
494 _map = {}
495 _map = {}
495 try:
496 try:
496 # check if repo is not empty by any chance, skip quicker if it is.
497 # check if repo is not empty by any chance, skip quicker if it is.
497 _scm = repo.scm_instance()
498 _scm = repo.scm_instance()
498 if _scm.is_empty():
499 if _scm.is_empty():
499 return []
500 return []
500
501
501 _d, _f = ScmModel().get_nodes(
502 _d, _f = ScmModel().get_nodes(
502 repo, revision, root_path, flat=False,
503 repo, revision, root_path, flat=False,
503 extended_info=extended_info, content=content,
504 extended_info=extended_info, content=content,
504 max_file_bytes=max_file_bytes)
505 max_file_bytes=max_file_bytes)
505 _map = {
506 _map = {
506 'all': _d + _f,
507 'all': _d + _f,
507 'files': _f,
508 'files': _f,
508 'dirs': _d,
509 'dirs': _d,
509 }
510 }
510 return _map[ret_type]
511 return _map[ret_type]
511 except KeyError:
512 except KeyError:
512 raise JSONRPCError(
513 raise JSONRPCError(
513 'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
514 'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
514 except Exception:
515 except Exception:
515 log.exception("Exception occurred while trying to get repo nodes")
516 log.exception("Exception occurred while trying to get repo nodes")
516 raise JSONRPCError(
517 raise JSONRPCError(
517 'failed to get repo: `%s` nodes' % repo.repo_name
518 'failed to get repo: `%s` nodes' % repo.repo_name
518 )
519 )
519
520
520
521
521 @jsonrpc_method()
522 @jsonrpc_method()
522 def get_repo_refs(request, apiuser, repoid):
523 def get_repo_refs(request, apiuser, repoid):
523 """
524 """
524 Returns a dictionary of current references. It returns
525 Returns a dictionary of current references. It returns
525 bookmarks, branches, closed_branches, and tags for a given repository.
526 bookmarks, branches, closed_branches, and tags for a given repository.
526
527
529 This command can only be run using an |authtoken| with admin rights,
530 This command can only be run using an |authtoken| with admin rights,
530 or users with at least read rights to |repos|.
531 or users with at least read rights to |repos|.
531
532
532 :param apiuser: This is filled automatically from the |authtoken|.
533 :param apiuser: This is filled automatically from the |authtoken|.
533 :type apiuser: AuthUser
534 :type apiuser: AuthUser
534 :param repoid: The repository name or repository ID.
535 :param repoid: The repository name or repository ID.
535 :type repoid: str or int
536 :type repoid: str or int
536
537
537 Example output:
538 Example output:
538
539
539 .. code-block:: bash
540 .. code-block:: bash
540
541
541 id : <id_given_in_input>
542 id : <id_given_in_input>
542 "result": {
543 "result": {
543 "bookmarks": {
544 "bookmarks": {
544 "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
545 "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
545 "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
546 "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
546 },
547 },
547 "branches": {
548 "branches": {
548 "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
549 "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
549 "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
550 "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
550 },
551 },
551 "branches_closed": {},
552 "branches_closed": {},
552 "tags": {
553 "tags": {
553 "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
554 "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
554 "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
555 "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
555 "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
556 "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
556 "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
557 "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
557 }
558 }
558 }
559 }
559 error: null
560 error: null
560 """
561 """
561
562
562 repo = get_repo_or_error(repoid)
563 repo = get_repo_or_error(repoid)
563 if not has_superadmin_permission(apiuser):
564 if not has_superadmin_permission(apiuser):
564 _perms = ('repository.admin', 'repository.write', 'repository.read',)
565 _perms = ('repository.admin', 'repository.write', 'repository.read',)
565 validate_repo_permissions(apiuser, repoid, repo, _perms)
566 validate_repo_permissions(apiuser, repoid, repo, _perms)
566
567
567 try:
568 try:
568 # check if repo is not empty by any chance, skip quicker if it is.
569 # check if repo is not empty by any chance, skip quicker if it is.
569 vcs_instance = repo.scm_instance()
570 vcs_instance = repo.scm_instance()
570 refs = vcs_instance.refs()
571 refs = vcs_instance.refs()
571 return refs
572 return refs
572 except Exception:
573 except Exception:
573 log.exception("Exception occurred while trying to get repo refs")
574 log.exception("Exception occurred while trying to get repo refs")
574 raise JSONRPCError(
575 raise JSONRPCError(
575 'failed to get repo: `%s` references' % repo.repo_name
576 'failed to get repo: `%s` references' % repo.repo_name
576 )
577 )
577
578
578
579
579 @jsonrpc_method()
580 @jsonrpc_method()
580 def create_repo(
581 def create_repo(
581 request, apiuser, repo_name, repo_type,
582 request, apiuser, repo_name, repo_type,
582 owner=Optional(OAttr('apiuser')),
583 owner=Optional(OAttr('apiuser')),
583 description=Optional(''),
584 description=Optional(''),
584 private=Optional(False),
585 private=Optional(False),
585 clone_uri=Optional(None),
586 clone_uri=Optional(None),
586 landing_rev=Optional('rev:tip'),
587 landing_rev=Optional('rev:tip'),
587 enable_statistics=Optional(False),
588 enable_statistics=Optional(False),
588 enable_locking=Optional(False),
589 enable_locking=Optional(False),
589 enable_downloads=Optional(False),
590 enable_downloads=Optional(False),
590 copy_permissions=Optional(False)):
591 copy_permissions=Optional(False)):
591 """
592 """
592 Creates a repository.
593 Creates a repository.
593
594
594 * If the repository name contains "/", the repository will be created inside
595 * If the repository name contains "/", the repository will be created inside
595 a repository group or nested repository groups
596 a repository group or nested repository groups
596
597
597 For example "foo/bar/repo1" will create |repo| called "repo1" inside
598 For example "foo/bar/repo1" will create |repo| called "repo1" inside
598 group "foo/bar". You have to have permissions to access and write to
599 group "foo/bar". You have to have permissions to access and write to
599 the last repository group ("bar" in this example)
600 the last repository group ("bar" in this example)
600
601
601 This command can only be run using an |authtoken| with at least
602 This command can only be run using an |authtoken| with at least
602 permissions to create repositories, or write permissions to
603 permissions to create repositories, or write permissions to
603 parent repository groups.
604 parent repository groups.
604
605
605 :param apiuser: This is filled automatically from the |authtoken|.
606 :param apiuser: This is filled automatically from the |authtoken|.
606 :type apiuser: AuthUser
607 :type apiuser: AuthUser
607 :param repo_name: Set the repository name.
608 :param repo_name: Set the repository name.
608 :type repo_name: str
609 :type repo_name: str
609 :param repo_type: Set the repository type; 'hg','git', or 'svn'.
610 :param repo_type: Set the repository type; 'hg','git', or 'svn'.
610 :type repo_type: str
611 :type repo_type: str
611 :param owner: user_id or username
612 :param owner: user_id or username
612 :type owner: Optional(str)
613 :type owner: Optional(str)
613 :param description: Set the repository description.
614 :param description: Set the repository description.
614 :type description: Optional(str)
615 :type description: Optional(str)
615 :param private: set repository as private
616 :param private: set repository as private
616 :type private: bool
617 :type private: bool
617 :param clone_uri: set clone_uri
618 :param clone_uri: set clone_uri
618 :type clone_uri: str
619 :type clone_uri: str
619 :param landing_rev: <rev_type>:<rev>
620 :param landing_rev: <rev_type>:<rev>
620 :type landing_rev: str
621 :type landing_rev: str
621 :param enable_locking:
622 :param enable_locking:
622 :type enable_locking: bool
623 :type enable_locking: bool
623 :param enable_downloads:
624 :param enable_downloads:
624 :type enable_downloads: bool
625 :type enable_downloads: bool
625 :param enable_statistics:
626 :param enable_statistics:
626 :type enable_statistics: bool
627 :type enable_statistics: bool
627 :param copy_permissions: Copy permission from group in which the
628 :param copy_permissions: Copy permission from group in which the
628 repository is being created.
629 repository is being created.
629 :type copy_permissions: bool
630 :type copy_permissions: bool
630
631
631
632
632 Example output:
633 Example output:
633
634
634 .. code-block:: bash
635 .. code-block:: bash
635
636
636 id : <id_given_in_input>
637 id : <id_given_in_input>
637 result: {
638 result: {
638 "msg": "Created new repository `<reponame>`",
639 "msg": "Created new repository `<reponame>`",
639 "success": true,
640 "success": true,
640 "task": "<celery task id or None if done sync>"
641 "task": "<celery task id or None if done sync>"
641 }
642 }
642 error: null
643 error: null
643
644
644
645
645 Example error output:
646 Example error output:
646
647
647 .. code-block:: bash
648 .. code-block:: bash
648
649
649 id : <id_given_in_input>
650 id : <id_given_in_input>
650 result : null
651 result : null
651 error : {
652 error : {
652 'failed to create repository `<repo_name>`'
653 'failed to create repository `<repo_name>`'
653 }
654 }
654
655
655 """
656 """
656
657
657 owner = validate_set_owner_permissions(apiuser, owner)
658 owner = validate_set_owner_permissions(apiuser, owner)
658
659
659 description = Optional.extract(description)
660 description = Optional.extract(description)
660 copy_permissions = Optional.extract(copy_permissions)
661 copy_permissions = Optional.extract(copy_permissions)
661 clone_uri = Optional.extract(clone_uri)
662 clone_uri = Optional.extract(clone_uri)
662 landing_commit_ref = Optional.extract(landing_rev)
663 landing_commit_ref = Optional.extract(landing_rev)
663
664
664 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
665 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
665 if isinstance(private, Optional):
666 if isinstance(private, Optional):
666 private = defs.get('repo_private') or Optional.extract(private)
667 private = defs.get('repo_private') or Optional.extract(private)
667 if isinstance(repo_type, Optional):
668 if isinstance(repo_type, Optional):
668 repo_type = defs.get('repo_type')
669 repo_type = defs.get('repo_type')
669 if isinstance(enable_statistics, Optional):
670 if isinstance(enable_statistics, Optional):
670 enable_statistics = defs.get('repo_enable_statistics')
671 enable_statistics = defs.get('repo_enable_statistics')
671 if isinstance(enable_locking, Optional):
672 if isinstance(enable_locking, Optional):
672 enable_locking = defs.get('repo_enable_locking')
673 enable_locking = defs.get('repo_enable_locking')
673 if isinstance(enable_downloads, Optional):
674 if isinstance(enable_downloads, Optional):
674 enable_downloads = defs.get('repo_enable_downloads')
675 enable_downloads = defs.get('repo_enable_downloads')
675
676
676 schema = repo_schema.RepoSchema().bind(
677 schema = repo_schema.RepoSchema().bind(
677 repo_type_options=rhodecode.BACKENDS.keys(),
678 repo_type_options=rhodecode.BACKENDS.keys(),
678 # user caller
679 # user caller
679 user=apiuser)
680 user=apiuser)
680
681
681 try:
682 try:
682 schema_data = schema.deserialize(dict(
683 schema_data = schema.deserialize(dict(
683 repo_name=repo_name,
684 repo_name=repo_name,
684 repo_type=repo_type,
685 repo_type=repo_type,
685 repo_owner=owner.username,
686 repo_owner=owner.username,
686 repo_description=description,
687 repo_description=description,
687 repo_landing_commit_ref=landing_commit_ref,
688 repo_landing_commit_ref=landing_commit_ref,
688 repo_clone_uri=clone_uri,
689 repo_clone_uri=clone_uri,
689 repo_private=private,
690 repo_private=private,
690 repo_copy_permissions=copy_permissions,
691 repo_copy_permissions=copy_permissions,
691 repo_enable_statistics=enable_statistics,
692 repo_enable_statistics=enable_statistics,
692 repo_enable_downloads=enable_downloads,
693 repo_enable_downloads=enable_downloads,
693 repo_enable_locking=enable_locking))
694 repo_enable_locking=enable_locking))
694 except validation_schema.Invalid as err:
695 except validation_schema.Invalid as err:
695 raise JSONRPCValidationError(colander_exc=err)
696 raise JSONRPCValidationError(colander_exc=err)
696
697
697 try:
698 try:
698 data = {
699 data = {
699 'owner': owner,
700 'owner': owner,
700 'repo_name': schema_data['repo_group']['repo_name_without_group'],
701 'repo_name': schema_data['repo_group']['repo_name_without_group'],
701 'repo_name_full': schema_data['repo_name'],
702 'repo_name_full': schema_data['repo_name'],
702 'repo_group': schema_data['repo_group']['repo_group_id'],
703 'repo_group': schema_data['repo_group']['repo_group_id'],
703 'repo_type': schema_data['repo_type'],
704 'repo_type': schema_data['repo_type'],
704 'repo_description': schema_data['repo_description'],
705 'repo_description': schema_data['repo_description'],
705 'repo_private': schema_data['repo_private'],
706 'repo_private': schema_data['repo_private'],
706 'clone_uri': schema_data['repo_clone_uri'],
707 'clone_uri': schema_data['repo_clone_uri'],
707 'repo_landing_rev': schema_data['repo_landing_commit_ref'],
708 'repo_landing_rev': schema_data['repo_landing_commit_ref'],
708 'enable_statistics': schema_data['repo_enable_statistics'],
709 'enable_statistics': schema_data['repo_enable_statistics'],
709 'enable_locking': schema_data['repo_enable_locking'],
710 'enable_locking': schema_data['repo_enable_locking'],
710 'enable_downloads': schema_data['repo_enable_downloads'],
711 'enable_downloads': schema_data['repo_enable_downloads'],
711 'repo_copy_permissions': schema_data['repo_copy_permissions'],
712 'repo_copy_permissions': schema_data['repo_copy_permissions'],
712 }
713 }
713
714
714 task = RepoModel().create(form_data=data, cur_user=owner)
715 task = RepoModel().create(form_data=data, cur_user=owner)
715 from celery.result import BaseAsyncResult
716 task_id = get_task_id(task)
716 task_id = None
717 if isinstance(task, BaseAsyncResult):
718 task_id = task.task_id
719 # no commit, it's done in RepoModel, or async via celery
717 # no commit, it's done in RepoModel, or async via celery
720 return {
718 return {
721 'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
719 'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
722 'success': True, # cannot return the repo data here since fork
720 'success': True, # cannot return the repo data here since fork
723 # can be done async
721 # can be done async
724 'task': task_id
722 'task': task_id
725 }
723 }
726 except Exception:
724 except Exception:
727 log.exception(
725 log.exception(
728 u"Exception while trying to create the repository %s",
726 u"Exception while trying to create the repository %s",
729 schema_data['repo_name'])
727 schema_data['repo_name'])
730 raise JSONRPCError(
728 raise JSONRPCError(
731 'failed to create repository `%s`' % (schema_data['repo_name'],))
729 'failed to create repository `%s`' % (schema_data['repo_name'],))
732
730
733
731
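The `get_task_id` helper imported above replaces the inline `BaseAsyncResult` check removed in this hunk; Celery 4 dropped `BaseAsyncResult` in favour of `AsyncResult`. A sketch of the presumed behaviour, inferred from the removed lines rather than from the helper's actual source:

.. code-block:: python

    from celery.result import AsyncResult  # BaseAsyncResult is gone in celery 4.x

    def get_task_id(task):
        # Async results expose a task id; synchronous (non-celery) results
        # from RepoModel().create() yield None here, mirroring the old code.
        if isinstance(task, AsyncResult):
            return task.task_id
        return None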
734 @jsonrpc_method()
732 @jsonrpc_method()
def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
                      description=Optional('')):
    """
    Adds an extra field to a repository.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository id.
    :type repoid: str or int
    :param key: Create a unique field key for this repository.
    :type key: str
    :param label: Optional label for the field; defaults to the key.
    :type label: Optional(str)
    :param description: Optional description of the field.
    :type description: Optional(str)
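
    Example output (an illustrative sketch; the id and field key are
    placeholders, shown in the same JSON-RPC envelope used by the other
    examples in this module):

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "msg": "Added new repository field `ticket_link`",
            "success": true
        }
        error : null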
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    label = Optional.extract(label) or key
    description = Optional.extract(description)

    field = RepositoryField.get_by_key_name(key, repo)
    if field:
        raise JSONRPCError('Field with key '
                           '`%s` exists for repo `%s`' % (key, repoid))

    try:
        RepoModel().add_repo_field(repo, key, field_label=label,
                                   field_desc=description)
        Session().commit()
        return {
            'msg': "Added new repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception("Exception occurred while trying to add field to repo")
        raise JSONRPCError(
            'failed to create new field for repository `%s`' % (repoid,))


@jsonrpc_method()
def remove_field_from_repo(request, apiuser, repoid, key):
    """
    Removes an extra field from a repository.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param key: Set the unique field key for this repository.
    :type key: str
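
    Example output (illustrative; values are placeholders, following the
    same JSON-RPC envelope as the other examples here):

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "msg": "Deleted repository field `ticket_link`",
            "success": true
        }
        error : null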
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    field = RepositoryField.get_by_key_name(key, repo)
    if not field:
        raise JSONRPCError('Field with key `%s` does not '
                           'exist for repo `%s`' % (key, repoid))

    try:
        RepoModel().delete_repo_field(repo, field_key=key)
        Session().commit()
        return {
            'msg': "Deleted repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to delete field from repo")
        raise JSONRPCError(
            'failed to delete field for repository `%s`' % (repoid,))


@jsonrpc_method()
def update_repo(
        request, apiuser, repoid, repo_name=Optional(None),
        owner=Optional(OAttr('apiuser')), description=Optional(''),
        private=Optional(False), clone_uri=Optional(None),
        landing_rev=Optional('rev:tip'), fork_of=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False), fields=Optional('')):
    """
    Updates a repository with the given information.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    * If the repository name contains "/", the repository will be updated and
      moved into the matching repository group or nested repository groups.

    For example repoid=repo-test name="foo/bar/repo-test" will update |repo|
    called "repo-test" and place it inside group "foo/bar".
    You must have permissions to access and write to the last repository
    group ("bar" in this example).

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: repository name or repository ID.
    :type repoid: str or int
    :param repo_name: Update the |repo| name, including the
        repository group it's in.
    :type repo_name: str
    :param owner: Set the |repo| owner.
    :type owner: str
    :param fork_of: Set the |repo| as fork of another |repo|.
    :type fork_of: str
    :param description: Update the |repo| description.
    :type description: str
    :param private: Set the |repo| as private. (True | False)
    :type private: bool
    :param clone_uri: Update the |repo| clone URI.
    :type clone_uri: str
    :param landing_rev: Set the |repo| landing revision. Default is ``rev:tip``.
    :type landing_rev: str
    :param enable_statistics: Enable statistics on the |repo|, (True | False).
    :type enable_statistics: bool
    :param enable_locking: Enable |repo| locking.
    :type enable_locking: bool
    :param enable_downloads: Enable downloads from the |repo|, (True | False).
    :type enable_downloads: bool
    :param fields: Add extra fields to the |repo|. Use the following
        example format: ``field_key=field_val,field_key2=fieldval2``.
        Escape ', ' with \,
    :type fields: str
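
    Example output (illustrative only; the id is a placeholder and the
    repository object is abbreviated, but the shape matches what this call
    returns):

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "updated repo ID:<repo_id> <reponame>",
            "repository": <repository_object>
        }
        error: null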
    """

    repo = get_repo_or_error(repoid)

    include_secrets = False
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))
    else:
        include_secrets = True

    updates = dict(
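        # every keyword below keeps the repository's current value whenever
        # the caller left the corresponding API parameter at its Optional
        # sentinel, so a partial update only touches the fields that were sent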
        repo_name=repo_name
        if not isinstance(repo_name, Optional) else repo.repo_name,

        fork_id=fork_of
        if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,

        user=owner
        if not isinstance(owner, Optional) else repo.user.username,

        repo_description=description
        if not isinstance(description, Optional) else repo.description,

        repo_private=private
        if not isinstance(private, Optional) else repo.private,

        clone_uri=clone_uri
        if not isinstance(clone_uri, Optional) else repo.clone_uri,

        repo_landing_rev=landing_rev
        if not isinstance(landing_rev, Optional) else repo._landing_revision,

        repo_enable_statistics=enable_statistics
        if not isinstance(enable_statistics, Optional) else repo.enable_statistics,

        repo_enable_locking=enable_locking
        if not isinstance(enable_locking, Optional) else repo.enable_locking,

        repo_enable_downloads=enable_downloads
        if not isinstance(enable_downloads, Optional) else repo.enable_downloads)

    ref_choices, _labels = ScmModel().get_repo_landing_revs(
        request.translate, repo=repo)

    old_values = repo.get_api_data()
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        # user caller
        user=apiuser,
        old_values=old_values)
    try:
        schema_data = schema.deserialize(dict(
            # we save old value, users cannot change type
            repo_type=repo.repo_type,

            repo_name=updates['repo_name'],
            repo_owner=updates['user'],
            repo_description=updates['repo_description'],
            repo_clone_uri=updates['clone_uri'],
            repo_fork_of=updates['fork_id'],
            repo_private=updates['repo_private'],
            repo_landing_commit_ref=updates['repo_landing_rev'],
            repo_enable_statistics=updates['repo_enable_statistics'],
            repo_enable_downloads=updates['repo_enable_downloads'],
            repo_enable_locking=updates['repo_enable_locking']))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    # save validated data back into the updates dict
    validated_updates = dict(
        repo_name=schema_data['repo_group']['repo_name_without_group'],
        repo_group=schema_data['repo_group']['repo_group_id'],

        user=schema_data['repo_owner'],
        repo_description=schema_data['repo_description'],
        repo_private=schema_data['repo_private'],
        clone_uri=schema_data['repo_clone_uri'],
        repo_landing_rev=schema_data['repo_landing_commit_ref'],
        repo_enable_statistics=schema_data['repo_enable_statistics'],
        repo_enable_locking=schema_data['repo_enable_locking'],
        repo_enable_downloads=schema_data['repo_enable_downloads'],
    )

    if schema_data['repo_fork_of']:
        fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
        validated_updates['fork_id'] = fork_repo.repo_id

    # extra fields
    fields = parse_args(Optional.extract(fields), key_prefix='ex_')
    if fields:
        validated_updates.update(fields)

    try:
        RepoModel().update(repo, **validated_updates)
        audit_logger.store_api(
            'repo.edit', action_data={'old_data': old_values},
            user=apiuser, repo=repo)
        Session().commit()
        return {
            'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
            'repository': repo.get_api_data(include_secrets=include_secrets)
        }
    except Exception:
        log.exception(
            u"Exception while trying to update the repository %s",
            repoid)
        raise JSONRPCError('failed to update repo `%s`' % repoid)


@jsonrpc_method()
def fork_repo(request, apiuser, repoid, fork_name,
              owner=Optional(OAttr('apiuser')),
              description=Optional(''),
              private=Optional(False),
              clone_uri=Optional(None),
              landing_rev=Optional('rev:tip'),
              copy_permissions=Optional(False)):
    """
    Creates a fork of the specified |repo|.

    * If the fork_name contains "/", the fork will be created inside
      a repository group or nested repository groups.

    For example "foo/bar/fork-repo" will create a fork called "fork-repo"
    inside group "foo/bar". You must have permissions to access and
    write to the last repository group ("bar" in this example).

    This command can only be run using an |authtoken| with at least read
    permissions on the |repo| being forked, and the calling user must also
    have permission to create forks.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set repository name or repository ID.
    :type repoid: str or int
    :param fork_name: Set the fork name, including its repository group membership.
    :type fork_name: str
    :param owner: Set the fork owner.
    :type owner: str
    :param description: Set the fork description.
    :type description: str
    :param copy_permissions: Copy permissions from parent |repo|. The
        default is False.
    :type copy_permissions: bool
    :param private: Make the fork private. The default is False.
    :type private: bool
    :param landing_rev: Set the landing revision. The default is tip.

    Example input:

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid" : "<reponame or repo_id>",
            "fork_name": "<forkname>",
            "owner": "<username or user_id = Optional(=apiuser)>",
            "description": "<description>",
            "copy_permissions": "<bool>",
            "private": "<bool>",
            "landing_rev": "<landing_rev>"
        }

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Created fork of `<reponame>` as `<forkname>`",
            "success": true,
            "task": "<celery task id or None if done sync>"
        }
        error: null

    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name

    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for
        # this repo that we fork !
        _perms = (
            'repository.admin', 'repository.write', 'repository.read')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

        # check if the regular user has at least fork permissions as well
        if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
            raise JSONRPCForbidden()

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    landing_commit_ref = Optional.extract(landing_rev)
    private = Optional.extract(private)

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        # user caller
        user=apiuser)

    try:
        schema_data = schema.deserialize(dict(
            repo_name=fork_name,
            repo_type=repo.repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'fork_parent_id': repo.repo_id,

            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'description': schema_data['repo_description'],
            'private': schema_data['repo_private'],
            'copy_permissions': schema_data['repo_copy_permissions'],
            'landing_rev': schema_data['repo_landing_commit_ref'],
        }

        task = RepoModel().create_fork(data, cur_user=owner)
        # no commit, it's done in RepoModel, or async via celery
        task_id = get_task_id(task)

        return {
            'msg': 'Created fork of `%s` as `%s`' % (
                repo.repo_name, schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create fork %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to fork repository `%s` as `%s`' % (
                repo_name, schema_data['repo_name']))


@jsonrpc_method()
def delete_repo(request, apiuser, repoid, forks=Optional('')):
    """
    Deletes a repository.

    * When the `forks` parameter is set it's possible to detach or delete
      forks of the deleted repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param forks: Set to `detach` or `delete` forks from the |repo|.
    :type forks: Optional(str)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Deleted repository `<reponame>`",
            "success": true
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        handle_forks = Optional.extract(forks)
        _forks_msg = ''
        _forks = [f for f in repo.forks]
        if handle_forks == 'detach':
            _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
        elif handle_forks == 'delete':
            _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
        elif _forks:
            raise JSONRPCError(
                'Cannot delete `%s`, it still contains attached forks' %
                (repo.repo_name,)
            )
        old_data = repo.get_api_data()
        RepoModel().delete(repo, forks=forks)

        repo = audit_logger.RepoWrap(repo_id=None,
                                     repo_name=repo.repo_name)

        audit_logger.store_api(
            'repo.delete', action_data={'old_data': old_data},
            user=apiuser, repo=repo)

        ScmModel().mark_for_invalidation(repo_name, delete=True)
        Session().commit()
        return {
            'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying to delete repo")
        raise JSONRPCError(
            'failed to delete repository `%s`' % (repo_name,)
        )


#TODO: marcink, change name ?
@jsonrpc_method()
def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
    """
    Invalidates the cache for the specified repository.

    This command can only be run using an |authtoken| with write or admin
    rights to the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param delete_keys: This deletes the invalidated keys instead of
        just flagging them.
    :type delete_keys: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            'msg': Cache for repository `<repository name>` was invalidated,
            'repository': <repository name>
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred during cache invalidation action'
        }

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    delete = Optional.extract(delete_keys)
    try:
        ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
        return {
            'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
            'repository': repo.repo_name
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to invalidate repo cache")
        raise JSONRPCError(
            'Error occurred during cache invalidation action'
        )


#TODO: marcink, change name ?
@jsonrpc_method()
def lock(request, apiuser, repoid, locked=Optional(None),
         userid=Optional(OAttr('apiuser'))):
    """
    Sets the lock state of the specified |repo| by the given user.
    For more information, see :ref:`repo-locking`.

    * If the ``userid`` option is not set, the repository is locked to the
      user who called the method.
    * If the ``locked`` parameter is not set, the current lock state of the
      repository is displayed.

    This command can only be run using an |authtoken| with write or admin
    rights to the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param locked: Sets the lock state.
    :type locked: Optional(``True`` | ``False``)
    :param userid: Set the repository lock to this user.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            'repo': '<reponame>',
            'locked': <bool: lock state>,
            'locked_since': <int: lock timestamp>,
            'locked_by': <username of person who made the lock>,
            'lock_reason': <str: reason for locking>,
            'lock_state_changed': <bool: True if lock state has been changed in this request>,
            'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
            or
            'msg': 'Repo `<repository name>` not locked.'
            or
            'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred locking repository `<reponame>`'
        }
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least write permission for this repo !
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # make sure a normal user does not pass someone else's userid;
    # they are not allowed to do that
    if not isinstance(userid, Optional) and userid != apiuser.user_id:
        raise JSONRPCError('userid is not the same as your user')

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)

    if isinstance(locked, Optional):
        lockobj = repo.locked

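        # repo.locked appears to be a (user_id, lock_timestamp, reason) tuple,
        # with user_id set to None when no lock is currently held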
        if lockobj[0] is None:
            _d = {
                'repo': repo.repo_name,
                'locked': False,
                'locked_since': None,
                'locked_by': None,
                'lock_reason': None,
                'lock_state_changed': False,
                'msg': 'Repo `%s` not locked.' % repo.repo_name
            }
            return _d
        else:
            _user_id, _time, _reason = lockobj
            lock_user = get_user_or_error(userid)
            _d = {
                'repo': repo.repo_name,
                'locked': True,
                'locked_since': _time,
                'locked_by': lock_user.username,
                'lock_reason': _reason,
                'lock_state_changed': False,
                'msg': ('Repo `%s` locked by `%s` on `%s`.'
                        % (repo.repo_name, lock_user.username,
                           json.dumps(time_to_datetime(_time))))
            }
            return _d

    # force locked state through a flag
    else:
        locked = str2bool(locked)
        lock_reason = Repository.LOCK_API
        try:
            if locked:
                lock_time = time.time()
                Repository.lock(repo, user.user_id, lock_time, lock_reason)
            else:
                lock_time = None
                Repository.unlock(repo)
            _d = {
                'repo': repo.repo_name,
                'locked': locked,
                'locked_since': lock_time,
                'locked_by': user.username,
                'lock_reason': lock_reason,
                'lock_state_changed': True,
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
                        % (user.username, repo.repo_name, locked))
            }
            return _d
        except Exception:
            log.exception(
                "Exception occurred while trying to lock repository")
            raise JSONRPCError(
                'Error occurred locking repository `%s`' % repo.repo_name
            )


@jsonrpc_method()
def comment_commit(
        request, apiuser, repoid, commit_id, message, status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None),
        userid=Optional(OAttr('apiuser'))):
    """
    Set a commit comment, and optionally change the status of the commit.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Specify the commit_id for which to set a comment.
    :type commit_id: str
    :param message: The comment text.
    :type message: str
    :param status: (**Optional**) status of commit, one of: 'not_reviewed',
        'approved', 'rejected', 'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param resolves_comment_id: id of an existing TODO comment that this
        comment resolves.
    :type resolves_comment_id: Optional(int)
    :param userid: Set the user name of the comment creator.
    :type userid: Optional(str or int)
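
    Example input (an illustrative sketch only; the args mirror the
    parameters documented above and every value is a placeholder):

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid": "<reponame or repo_id>",
            "commit_id": "<commit_id>",
            "message": "<comment text>",
            "status": "approved"
        }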

    Example output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : {
                "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
                "status_change": null or <status>,
                "success": true
            },
            "error" : null
        }

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        commit_id = repo.scm_instance().get_commit(commit_id=commit_id).raw_id
    except Exception as e:
        log.exception('Failed to fetch commit')
        raise JSONRPCError(e.message)

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)
    status = Optional.extract(status)
    comment_type = Optional.extract(comment_type)
    resolves_comment_id = Optional.extract(resolves_comment_id)

    allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
    if status and status not in allowed_statuses:
        raise JSONRPCError('Bad status, must be one '
                           'of %s, got %s' % (allowed_statuses, status,))

    if resolves_comment_id:
        comment = ChangesetComment.get(resolves_comment_id)
        if not comment:
            raise JSONRPCError(
                'Invalid resolves_comment_id `%s` for this commit.'
                % resolves_comment_id)
        if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
            raise JSONRPCError(
                'Comment `%s` is wrong type for setting status to resolved.'
                % resolves_comment_id)

    try:
        rc_config = SettingsModel().get_all_settings()
        renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
        status_change_label = ChangesetStatus.get_status_lbl(status)
        comment = CommentsModel().create(
            message, repo, user, commit_id=commit_id,
            status_change=status_change_label,
            status_change_type=status,
            renderer=renderer,
            comment_type=comment_type,
            resolves_comment_id=resolves_comment_id
        )
        if status:
            # also do a status change
            try:
                ChangesetStatusModel().set_status(
                    repo, status, user, comment, revision=commit_id,
                    dont_allow_on_closed_pull_request=True
                )
            except StatusChangeOnClosedPullRequestError:
                log.exception(
                    "Exception occurred while trying to change repo commit status")
                msg = ('Changing status on a changeset associated with '
                       'a closed pull request is not allowed')
                raise JSONRPCError(msg)

        Session().commit()
        return {
            'msg': (
                'Commented on commit `%s` for repository `%s`' % (
                    comment.revision, repo.repo_name)),
            'status_change': status,
            'success': True,
        }
    except JSONRPCError:
        # catch any inside errors and re-raise them, so the global
        # catch below does not silence them
        raise
    except Exception:
        log.exception("Exception occurred while trying to comment on commit")
        raise JSONRPCError(
            'failed to set comment on repository `%s`' % (repo.repo_name,)
        )


@jsonrpc_method()
def grant_user_permission(request, apiuser, repoid, userid, perm):
    """
    Grant permissions for the specified user on the given repository,
    or update existing permissions if found.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name.
    :type userid: str
    :param perm: Set the user permissions, using the following format
        ``(repository.(none|read|write|admin))``
    :type perm: str

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
            "success": true
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    perm = get_perm_or_error(perm)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:

        RepoModel().grant_user_permission(repo=repo, user=user, perm=perm)

        Session().commit()
        return {
            'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
                perm.permission_name, user.username, repo.repo_name
            ),
            'success': True
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to edit permissions for repo")
        raise JSONRPCError(
            'failed to edit permission for user: `%s` in repo: `%s`' % (
                userid, repoid
            )
        )


@jsonrpc_method()
1576 def revoke_user_permission(request, apiuser, repoid, userid):
1572 def revoke_user_permission(request, apiuser, repoid, userid):
1577 """
1573 """
1578 Revoke permission for a user on the specified repository.
1574 Revoke permission for a user on the specified repository.
1579
1575
1580 This command can only be run using an |authtoken| with admin
1576 This command can only be run using an |authtoken| with admin
1581 permissions on the |repo|.
1577 permissions on the |repo|.
1582
1578
1583 :param apiuser: This is filled automatically from the |authtoken|.
1579 :param apiuser: This is filled automatically from the |authtoken|.
1584 :type apiuser: AuthUser
1580 :type apiuser: AuthUser
1585 :param repoid: Set the repository name or repository ID.
1581 :param repoid: Set the repository name or repository ID.
1586 :type repoid: str or int
1582 :type repoid: str or int
1587 :param userid: Set the user name of revoked user.
1583 :param userid: Set the user name of revoked user.
1588 :type userid: str or int
1584 :type userid: str or int
1589
1585
1590 Example error output:
1586 Example error output:
1591
1587
1592 .. code-block:: bash
1588 .. code-block:: bash
1593
1589
1594 id : <id_given_in_input>
1590 id : <id_given_in_input>
1595 result: {
1591 result: {
1596 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
1592 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
1597 "success": true
1593 "success": true
1598 }
1594 }
1599 error: null
1595 error: null
1600 """
1596 """
1601
1597
1602 repo = get_repo_or_error(repoid)
1598 repo = get_repo_or_error(repoid)
1603 user = get_user_or_error(userid)
1599 user = get_user_or_error(userid)
1604 if not has_superadmin_permission(apiuser):
1600 if not has_superadmin_permission(apiuser):
1605 _perms = ('repository.admin',)
1601 _perms = ('repository.admin',)
1606 validate_repo_permissions(apiuser, repoid, repo, _perms)
1602 validate_repo_permissions(apiuser, repoid, repo, _perms)
1607
1603
1608 try:
1604 try:
1609 RepoModel().revoke_user_permission(repo=repo, user=user)
1605 RepoModel().revoke_user_permission(repo=repo, user=user)
1610 Session().commit()
1606 Session().commit()
1611 return {
1607 return {
1612 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
1608 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
1613 user.username, repo.repo_name
1609 user.username, repo.repo_name
1614 ),
1610 ),
1615 'success': True
1611 'success': True
1616 }
1612 }
1617 except Exception:
1613 except Exception:
1618 log.exception(
1614 log.exception(
1619 "Exception occurred while trying revoke permissions to repo")
1615 "Exception occurred while trying revoke permissions to repo")
1620 raise JSONRPCError(
1616 raise JSONRPCError(
1621 'failed to edit permission for user: `%s` in repo: `%s`' % (
1617 'failed to edit permission for user: `%s` in repo: `%s`' % (
1622 userid, repoid
1618 userid, repoid
1623 )
1619 )
1624 )
1620 )
1625
1621
1626
1622
1627 @jsonrpc_method()
1623 @jsonrpc_method()
1628 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
1624 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
1629 """
1625 """
1630 Grant permission for a user group on the specified repository,
1626 Grant permission for a user group on the specified repository,
1631 or update existing permissions.
1627 or update existing permissions.
1632
1628
1633 This command can only be run using an |authtoken| with admin
1629 This command can only be run using an |authtoken| with admin
1634 permissions on the |repo|.
1630 permissions on the |repo|.
1635
1631
1636 :param apiuser: This is filled automatically from the |authtoken|.
1632 :param apiuser: This is filled automatically from the |authtoken|.
1637 :type apiuser: AuthUser
1633 :type apiuser: AuthUser
1638 :param repoid: Set the repository name or repository ID.
1634 :param repoid: Set the repository name or repository ID.
1639 :type repoid: str or int
1635 :type repoid: str or int
1640 :param usergroupid: Specify the ID of the user group.
1636 :param usergroupid: Specify the ID of the user group.
1641 :type usergroupid: str or int
1637 :type usergroupid: str or int
1642 :param perm: Set the user group permissions using the following
1638 :param perm: Set the user group permissions using the following
1643 format: (repository.(none|read|write|admin))
1639 format: (repository.(none|read|write|admin))
1644 :type perm: str
1640 :type perm: str
1645
1641
1646 Example output:
1642 Example output:
1647
1643
1648 .. code-block:: bash
1644 .. code-block:: bash
1649
1645
1650 id : <id_given_in_input>
1646 id : <id_given_in_input>
1651 result : {
1647 result : {
1652 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
1648 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
1653 "success": true
1649 "success": true
1654
1650
1655 }
1651 }
1656 error : null
1652 error : null
1657
1653
1658 Example error output:
1654 Example error output:
1659
1655
1660 .. code-block:: bash
1656 .. code-block:: bash
1661
1657
1662 id : <id_given_in_input>
1658 id : <id_given_in_input>
1663 result : null
1659 result : null
1664 error : {
1660 error : {
1665 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
1661 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
1666 }
1662 }
1667
1663
1668 """
1664 """
1669
1665
1670 repo = get_repo_or_error(repoid)
1666 repo = get_repo_or_error(repoid)
1671 perm = get_perm_or_error(perm)
1667 perm = get_perm_or_error(perm)
1672 if not has_superadmin_permission(apiuser):
1668 if not has_superadmin_permission(apiuser):
1673 _perms = ('repository.admin',)
1669 _perms = ('repository.admin',)
1674 validate_repo_permissions(apiuser, repoid, repo, _perms)
1670 validate_repo_permissions(apiuser, repoid, repo, _perms)
1675
1671
1676 user_group = get_user_group_or_error(usergroupid)
1672 user_group = get_user_group_or_error(usergroupid)
1677 if not has_superadmin_permission(apiuser):
1673 if not has_superadmin_permission(apiuser):
1678 # check if we have at least read permission for this user group!
1674 # check if we have at least read permission for this user group!
1679 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1675 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1680 if not HasUserGroupPermissionAnyApi(*_perms)(
1676 if not HasUserGroupPermissionAnyApi(*_perms)(
1681 user=apiuser, user_group_name=user_group.users_group_name):
1677 user=apiuser, user_group_name=user_group.users_group_name):
1682 raise JSONRPCError(
1678 raise JSONRPCError(
1683 'user group `%s` does not exist' % (usergroupid,))
1679 'user group `%s` does not exist' % (usergroupid,))
1684
1680
1685 try:
1681 try:
1686 RepoModel().grant_user_group_permission(
1682 RepoModel().grant_user_group_permission(
1687 repo=repo, group_name=user_group, perm=perm)
1683 repo=repo, group_name=user_group, perm=perm)
1688
1684
1689 Session().commit()
1685 Session().commit()
1690 return {
1686 return {
1691 'msg': 'Granted perm: `%s` for user group: `%s` in '
1687 'msg': 'Granted perm: `%s` for user group: `%s` in '
1692 'repo: `%s`' % (
1688 'repo: `%s`' % (
1693 perm.permission_name, user_group.users_group_name,
1689 perm.permission_name, user_group.users_group_name,
1694 repo.repo_name
1690 repo.repo_name
1695 ),
1691 ),
1696 'success': True
1692 'success': True
1697 }
1693 }
1698 except Exception:
1694 except Exception:
1699 log.exception(
1695 log.exception(
1700 "Exception occurred while trying change permission on repo")
1696 "Exception occurred while trying change permission on repo")
1701 raise JSONRPCError(
1697 raise JSONRPCError(
1702 'failed to edit permission for user group: `%s` in '
1698 'failed to edit permission for user group: `%s` in '
1703 'repo: `%s`' % (
1699 'repo: `%s`' % (
1704 usergroupid, repo.repo_name
1700 usergroupid, repo.repo_name
1705 )
1701 )
1706 )
1702 )
1707
1703
1708
1704
1709 @jsonrpc_method()
1705 @jsonrpc_method()
1710 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
1706 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
1711 """
1707 """
1712 Revoke the permissions of a user group on a given repository.
1708 Revoke the permissions of a user group on a given repository.
1713
1709
1714 This command can only be run using an |authtoken| with admin
1710 This command can only be run using an |authtoken| with admin
1715 permissions on the |repo|.
1711 permissions on the |repo|.
1716
1712
1717 :param apiuser: This is filled automatically from the |authtoken|.
1713 :param apiuser: This is filled automatically from the |authtoken|.
1718 :type apiuser: AuthUser
1714 :type apiuser: AuthUser
1719 :param repoid: Set the repository name or repository ID.
1715 :param repoid: Set the repository name or repository ID.
1720 :type repoid: str or int
1716 :type repoid: str or int
1721 :param usergroupid: Specify the user group ID.
1717 :param usergroupid: Specify the user group ID.
1722 :type usergroupid: str or int
1718 :type usergroupid: str or int
1723
1719
1724 Example output:
1720 Example output:
1725
1721
1726 .. code-block:: bash
1722 .. code-block:: bash
1727
1723
1728 id : <id_given_in_input>
1724 id : <id_given_in_input>
1729 result: {
1725 result: {
1730 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
1726 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
1731 "success": true
1727 "success": true
1732 }
1728 }
1733 error: null
1729 error: null
1734 """
1730 """
1735
1731
1736 repo = get_repo_or_error(repoid)
1732 repo = get_repo_or_error(repoid)
1737 if not has_superadmin_permission(apiuser):
1733 if not has_superadmin_permission(apiuser):
1738 _perms = ('repository.admin',)
1734 _perms = ('repository.admin',)
1739 validate_repo_permissions(apiuser, repoid, repo, _perms)
1735 validate_repo_permissions(apiuser, repoid, repo, _perms)
1740
1736
1741 user_group = get_user_group_or_error(usergroupid)
1737 user_group = get_user_group_or_error(usergroupid)
1742 if not has_superadmin_permission(apiuser):
1738 if not has_superadmin_permission(apiuser):
1743 # check if we have at least read permission for this user group!
1739 # check if we have at least read permission for this user group!
1744 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1740 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1745 if not HasUserGroupPermissionAnyApi(*_perms)(
1741 if not HasUserGroupPermissionAnyApi(*_perms)(
1746 user=apiuser, user_group_name=user_group.users_group_name):
1742 user=apiuser, user_group_name=user_group.users_group_name):
1747 raise JSONRPCError(
1743 raise JSONRPCError(
1748 'user group `%s` does not exist' % (usergroupid,))
1744 'user group `%s` does not exist' % (usergroupid,))
1749
1745
1750 try:
1746 try:
1751 RepoModel().revoke_user_group_permission(
1747 RepoModel().revoke_user_group_permission(
1752 repo=repo, group_name=user_group)
1748 repo=repo, group_name=user_group)
1753
1749
1754 Session().commit()
1750 Session().commit()
1755 return {
1751 return {
1756 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
1752 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
1757 user_group.users_group_name, repo.repo_name
1753 user_group.users_group_name, repo.repo_name
1758 ),
1754 ),
1759 'success': True
1755 'success': True
1760 }
1756 }
1761 except Exception:
1757 except Exception:
1762 log.exception("Exception occurred while trying to revoke "
1758 log.exception("Exception occurred while trying to revoke "
1763 "user group permission on repo")
1759 "user group permission on repo")
1764 raise JSONRPCError(
1760 raise JSONRPCError(
1765 'failed to edit permission for user group: `%s` in '
1761 'failed to edit permission for user group: `%s` in '
1766 'repo: `%s`' % (
1762 'repo: `%s`' % (
1767 user_group.users_group_name, repo.repo_name
1763 user_group.users_group_name, repo.repo_name
1768 )
1764 )
1769 )
1765 )
1770
1766
1771
1767
1772 @jsonrpc_method()
1768 @jsonrpc_method()
1773 def pull(request, apiuser, repoid):
1769 def pull(request, apiuser, repoid):
1774 """
1770 """
1775 Triggers a pull on the given repository from a remote location. You
1771 Triggers a pull on the given repository from a remote location. You
1776 can use this to keep remote repositories up-to-date.
1772 can use this to keep remote repositories up-to-date.
1777
1773
1778 This command can only be run using an |authtoken| with admin
1774 This command can only be run using an |authtoken| with admin
1779 rights to the specified repository. For more information,
1775 rights to the specified repository. For more information,
1780 see :ref:`config-token-ref`.
1776 see :ref:`config-token-ref`.
1781
1777
1782 This command takes the following options:
1778 This command takes the following options:
1783
1779
1784 :param apiuser: This is filled automatically from the |authtoken|.
1780 :param apiuser: This is filled automatically from the |authtoken|.
1785 :type apiuser: AuthUser
1781 :type apiuser: AuthUser
1786 :param repoid: The repository name or repository ID.
1782 :param repoid: The repository name or repository ID.
1787 :type repoid: str or int
1783 :type repoid: str or int
1788
1784
1789 Example output:
1785 Example output:
1790
1786
1791 .. code-block:: bash
1787 .. code-block:: bash
1792
1788
1793 id : <id_given_in_input>
1789 id : <id_given_in_input>
1794 result : {
1790 result : {
1795 "msg": "Pulled from `<repository name>`"
1791 "msg": "Pulled from `<repository name>`"
1796 "repository": "<repository name>"
1792 "repository": "<repository name>"
1797 }
1793 }
1798 error : null
1794 error : null
1799
1795
1800 Example error output:
1796 Example error output:
1801
1797
1802 .. code-block:: bash
1798 .. code-block:: bash
1803
1799
1804 id : <id_given_in_input>
1800 id : <id_given_in_input>
1805 result : null
1801 result : null
1806 error : {
1802 error : {
1807 "Unable to pull changes from `<reponame>`"
1803 "Unable to pull changes from `<reponame>`"
1808 }
1804 }
1809
1805
1810 """
1806 """
1811
1807
1812 repo = get_repo_or_error(repoid)
1808 repo = get_repo_or_error(repoid)
1813 if not has_superadmin_permission(apiuser):
1809 if not has_superadmin_permission(apiuser):
1814 _perms = ('repository.admin',)
1810 _perms = ('repository.admin',)
1815 validate_repo_permissions(apiuser, repoid, repo, _perms)
1811 validate_repo_permissions(apiuser, repoid, repo, _perms)
1816
1812
1817 try:
1813 try:
1818 ScmModel().pull_changes(repo.repo_name, apiuser.username)
1814 ScmModel().pull_changes(repo.repo_name, apiuser.username)
1819 return {
1815 return {
1820 'msg': 'Pulled from `%s`' % repo.repo_name,
1816 'msg': 'Pulled from `%s`' % repo.repo_name,
1821 'repository': repo.repo_name
1817 'repository': repo.repo_name
1822 }
1818 }
1823 except Exception:
1819 except Exception:
1824 log.exception("Exception occurred while trying to "
1820 log.exception("Exception occurred while trying to "
1825 "pull changes from remote location")
1821 "pull changes from remote location")
1826 raise JSONRPCError(
1822 raise JSONRPCError(
1827 'Unable to pull changes from `%s`' % repo.repo_name
1823 'Unable to pull changes from `%s`' % repo.repo_name
1828 )
1824 )
1829
1825
1830
1826
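A hedged example of how the pull call above could be used to keep a set of mirrored repositories current; the endpoint, token and repository names are placeholders.

import requests

API_URL = 'https://code.example.com/_admin/api'    # placeholder
AUTH_TOKEN = '<auth_token>'                         # placeholder admin token
MIRRORS = ['mirrors/linux', 'mirrors/cpython']      # placeholder repository names

for idx, repo_name in enumerate(MIRRORS, start=1):
    payload = {
        'id': idx,
        'auth_token': AUTH_TOKEN,
        'method': 'pull',
        'args': {'repoid': repo_name},
    }
    reply = requests.post(API_URL, json=payload).json()
    if reply.get('error'):
        print('pull failed for %s: %s' % (repo_name, reply['error']))
    else:
        print(reply['result']['msg'])               # e.g. "Pulled from `mirrors/linux`"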
1831 @jsonrpc_method()
1827 @jsonrpc_method()
1832 def strip(request, apiuser, repoid, revision, branch):
1828 def strip(request, apiuser, repoid, revision, branch):
1833 """
1829 """
1834 Strips the given revision from the specified repository.
1830 Strips the given revision from the specified repository.
1835
1831
1836 * This will remove the revision and all of its descendants.
1832 * This will remove the revision and all of its descendants.
1837
1833
1838 This command can only be run using an |authtoken| with admin rights to
1834 This command can only be run using an |authtoken| with admin rights to
1839 the specified repository.
1835 the specified repository.
1840
1836
1841 This command takes the following options:
1837 This command takes the following options:
1842
1838
1843 :param apiuser: This is filled automatically from the |authtoken|.
1839 :param apiuser: This is filled automatically from the |authtoken|.
1844 :type apiuser: AuthUser
1840 :type apiuser: AuthUser
1845 :param repoid: The repository name or repository ID.
1841 :param repoid: The repository name or repository ID.
1846 :type repoid: str or int
1842 :type repoid: str or int
1847 :param revision: The revision you wish to strip.
1843 :param revision: The revision you wish to strip.
1848 :type revision: str
1844 :type revision: str
1849 :param branch: The branch from which to strip the revision.
1845 :param branch: The branch from which to strip the revision.
1850 :type branch: str
1846 :type branch: str
1851
1847
1852 Example output:
1848 Example output:
1853
1849
1854 .. code-block:: bash
1850 .. code-block:: bash
1855
1851
1856 id : <id_given_in_input>
1852 id : <id_given_in_input>
1857 result : {
1853 result : {
1858 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
1854 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
1859 "repository": "<repository name>"
1855 "repository": "<repository name>"
1860 }
1856 }
1861 error : null
1857 error : null
1862
1858
1863 Example error output:
1859 Example error output:
1864
1860
1865 .. code-block:: bash
1861 .. code-block:: bash
1866
1862
1867 id : <id_given_in_input>
1863 id : <id_given_in_input>
1868 result : null
1864 result : null
1869 error : {
1865 error : {
1870 "Unable to strip commit <commit_hash> from repo `<repository name>`"
1866 "Unable to strip commit <commit_hash> from repo `<repository name>`"
1871 }
1867 }
1872
1868
1873 """
1869 """
1874
1870
1875 repo = get_repo_or_error(repoid)
1871 repo = get_repo_or_error(repoid)
1876 if not has_superadmin_permission(apiuser):
1872 if not has_superadmin_permission(apiuser):
1877 _perms = ('repository.admin',)
1873 _perms = ('repository.admin',)
1878 validate_repo_permissions(apiuser, repoid, repo, _perms)
1874 validate_repo_permissions(apiuser, repoid, repo, _perms)
1879
1875
1880 try:
1876 try:
1881 ScmModel().strip(repo, revision, branch)
1877 ScmModel().strip(repo, revision, branch)
1882 audit_logger.store_api(
1878 audit_logger.store_api(
1883 'repo.commit.strip', action_data={'commit_id': revision},
1879 'repo.commit.strip', action_data={'commit_id': revision},
1884 repo=repo,
1880 repo=repo,
1885 user=apiuser, commit=True)
1881 user=apiuser, commit=True)
1886
1882
1887 return {
1883 return {
1888 'msg': 'Stripped commit %s from repo `%s`' % (
1884 'msg': 'Stripped commit %s from repo `%s`' % (
1889 revision, repo.repo_name),
1885 revision, repo.repo_name),
1890 'repository': repo.repo_name
1886 'repository': repo.repo_name
1891 }
1887 }
1892 except Exception:
1888 except Exception:
1893 log.exception("Exception while trying to strip")
1889 log.exception("Exception while trying to strip")
1894 raise JSONRPCError(
1890 raise JSONRPCError(
1895 'Unable to strip commit %s from repo `%s`' % (
1891 'Unable to strip commit %s from repo `%s`' % (
1896 revision, repo.repo_name)
1892 revision, repo.repo_name)
1897 )
1893 )
1898
1894
1899
1895
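Since strip is destructive, a wrapper that surfaces the JSON-RPC error field is a sensible calling pattern. This is a sketch only, with placeholder endpoint, token, repository and commit hash; it is not part of the changeset.

import requests

API_URL = 'https://code.example.com/_admin/api'     # placeholder
AUTH_TOKEN = '<auth_token>'                          # placeholder admin token

def api_call(method, args, rpc_id=1):
    payload = {'id': rpc_id, 'auth_token': AUTH_TOKEN,
               'method': method, 'args': args}
    reply = requests.post(API_URL, json=payload).json()
    if reply['error'] is not None:
        raise RuntimeError('%s failed: %s' % (method, reply['error']))
    return reply['result']

result = api_call('strip', {
    'repoid': 'sandbox/hg-repo',                     # placeholder repository
    'revision': 'deadbeefcafe',                      # placeholder commit hash
    'branch': 'default',                             # branch holding the commit
})
print(result['msg'])                                 # "Stripped commit ... from repo `...`"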
1900 @jsonrpc_method()
1896 @jsonrpc_method()
1901 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
1897 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
1902 """
1898 """
1903 Returns all settings for a repository. If a key is given, only the
1899 Returns all settings for a repository. If a key is given, only the
1904 setting identified by that key is returned, or null.
1900 setting identified by that key is returned, or null.
1905
1901
1906 :param apiuser: This is filled automatically from the |authtoken|.
1902 :param apiuser: This is filled automatically from the |authtoken|.
1907 :type apiuser: AuthUser
1903 :type apiuser: AuthUser
1908 :param repoid: The repository name or repository id.
1904 :param repoid: The repository name or repository id.
1909 :type repoid: str or int
1905 :type repoid: str or int
1910 :param key: Key of the setting to return.
1906 :param key: Key of the setting to return.
1911 :type key: Optional(str)
1907 :type key: Optional(str)
1912
1908
1913 Example output:
1909 Example output:
1914
1910
1915 .. code-block:: bash
1911 .. code-block:: bash
1916
1912
1917 {
1913 {
1918 "error": null,
1914 "error": null,
1919 "id": 237,
1915 "id": 237,
1920 "result": {
1916 "result": {
1921 "extensions_largefiles": true,
1917 "extensions_largefiles": true,
1922 "extensions_evolve": true,
1918 "extensions_evolve": true,
1923 "hooks_changegroup_push_logger": true,
1919 "hooks_changegroup_push_logger": true,
1924 "hooks_changegroup_repo_size": false,
1920 "hooks_changegroup_repo_size": false,
1925 "hooks_outgoing_pull_logger": true,
1921 "hooks_outgoing_pull_logger": true,
1926 "phases_publish": "True",
1922 "phases_publish": "True",
1927 "rhodecode_hg_use_rebase_for_merging": true,
1923 "rhodecode_hg_use_rebase_for_merging": true,
1928 "rhodecode_pr_merge_enabled": true,
1924 "rhodecode_pr_merge_enabled": true,
1929 "rhodecode_use_outdated_comments": true
1925 "rhodecode_use_outdated_comments": true
1930 }
1926 }
1931 }
1927 }
1932 """
1928 """
1933
1929
1934 # Restrict access to this api method to admins only.
1930 # Restrict access to this api method to admins only.
1935 if not has_superadmin_permission(apiuser):
1931 if not has_superadmin_permission(apiuser):
1936 raise JSONRPCForbidden()
1932 raise JSONRPCForbidden()
1937
1933
1938 try:
1934 try:
1939 repo = get_repo_or_error(repoid)
1935 repo = get_repo_or_error(repoid)
1940 settings_model = VcsSettingsModel(repo=repo)
1936 settings_model = VcsSettingsModel(repo=repo)
1941 settings = settings_model.get_global_settings()
1937 settings = settings_model.get_global_settings()
1942 settings.update(settings_model.get_repo_settings())
1938 settings.update(settings_model.get_repo_settings())
1943
1939
1944 # If only a single setting is requested fetch it from all settings.
1940 # If only a single setting is requested fetch it from all settings.
1945 key = Optional.extract(key)
1941 key = Optional.extract(key)
1946 if key is not None:
1942 if key is not None:
1947 settings = settings.get(key, None)
1943 settings = settings.get(key, None)
1948 except Exception:
1944 except Exception:
1949 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
1945 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
1950 log.exception(msg)
1946 log.exception(msg)
1951 raise JSONRPCError(msg)
1947 raise JSONRPCError(msg)
1952
1948
1953 return settings
1949 return settings
1954
1950
1955
1951
1956 @jsonrpc_method()
1952 @jsonrpc_method()
1957 def set_repo_settings(request, apiuser, repoid, settings):
1953 def set_repo_settings(request, apiuser, repoid, settings):
1958 """
1954 """
1959 Update repository settings. Returns true on success.
1955 Update repository settings. Returns true on success.
1960
1956
1961 :param apiuser: This is filled automatically from the |authtoken|.
1957 :param apiuser: This is filled automatically from the |authtoken|.
1962 :type apiuser: AuthUser
1958 :type apiuser: AuthUser
1963 :param repoid: The repository name or repository id.
1959 :param repoid: The repository name or repository id.
1964 :type repoid: str or int
1960 :type repoid: str or int
1965 :param settings: The new settings for the repository.
1961 :param settings: The new settings for the repository.
1966 :type settings: dict
1962 :type settings: dict
1967
1963
1968 Example output:
1964 Example output:
1969
1965
1970 .. code-block:: bash
1966 .. code-block:: bash
1971
1967
1972 {
1968 {
1973 "error": null,
1969 "error": null,
1974 "id": 237,
1970 "id": 237,
1975 "result": true
1971 "result": true
1976 }
1972 }
1977 """
1973 """
1978 # Restrict access to this api method to admins only.
1974 # Restrict access to this api method to admins only.
1979 if not has_superadmin_permission(apiuser):
1975 if not has_superadmin_permission(apiuser):
1980 raise JSONRPCForbidden()
1976 raise JSONRPCForbidden()
1981
1977
1982 if type(settings) is not dict:
1978 if type(settings) is not dict:
1983 raise JSONRPCError('Settings have to be a JSON Object.')
1979 raise JSONRPCError('Settings have to be a JSON Object.')
1984
1980
1985 try:
1981 try:
1986 settings_model = VcsSettingsModel(repo=repoid)
1982 settings_model = VcsSettingsModel(repo=repoid)
1987
1983
1988 # Merge global, repo and incoming settings.
1984 # Merge global, repo and incoming settings.
1989 new_settings = settings_model.get_global_settings()
1985 new_settings = settings_model.get_global_settings()
1990 new_settings.update(settings_model.get_repo_settings())
1986 new_settings.update(settings_model.get_repo_settings())
1991 new_settings.update(settings)
1987 new_settings.update(settings)
1992
1988
1993 # Update the settings.
1989 # Update the settings.
1994 inherit_global_settings = new_settings.get(
1990 inherit_global_settings = new_settings.get(
1995 'inherit_global_settings', False)
1991 'inherit_global_settings', False)
1996 settings_model.create_or_update_repo_settings(
1992 settings_model.create_or_update_repo_settings(
1997 new_settings, inherit_global_settings=inherit_global_settings)
1993 new_settings, inherit_global_settings=inherit_global_settings)
1998 Session().commit()
1994 Session().commit()
1999 except Exception:
1995 except Exception:
2000 msg = 'Failed to update settings for repository `{}`'.format(repoid)
1996 msg = 'Failed to update settings for repository `{}`'.format(repoid)
2001 log.exception(msg)
1997 log.exception(msg)
2002 raise JSONRPCError(msg)
1998 raise JSONRPCError(msg)
2003
1999
2004 # Indicate success.
2000 # Indicate success.
2005 return True
2001 return True
2006
2002
2007
2003
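Because set_repo_settings merges the global, per-repository and submitted values before saving, a typical client does a read-modify-write: fetch the current settings, change a key, post them back. A sketch with placeholder endpoint, token and repository name:

import requests

API_URL = 'https://code.example.com/_admin/api'      # placeholder
AUTH = '<auth_token>'                                 # placeholder super-admin token

current = requests.post(API_URL, json={
    'id': 1, 'auth_token': AUTH,
    'method': 'get_repo_settings', 'args': {'repoid': 'my-repo'},
}).json()['result']

current['rhodecode_pr_merge_enabled'] = False         # flip one of the returned keys

ok = requests.post(API_URL, json={
    'id': 2, 'auth_token': AUTH,
    'method': 'set_repo_settings',
    'args': {'repoid': 'my-repo', 'settings': current},
}).json()['result']
print(ok)                                             # True on success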
2008 @jsonrpc_method()
2004 @jsonrpc_method()
2009 def maintenance(request, apiuser, repoid):
2005 def maintenance(request, apiuser, repoid):
2010 """
2006 """
2011 Triggers maintenance on the given repository.
2007 Triggers maintenance on the given repository.
2012
2008
2013 This command can only be run using an |authtoken| with admin
2009 This command can only be run using an |authtoken| with admin
2014 rights to the specified repository. For more information,
2010 rights to the specified repository. For more information,
2015 see :ref:`config-token-ref`.
2011 see :ref:`config-token-ref`.
2016
2012
2017 This command takes the following options:
2013 This command takes the following options:
2018
2014
2019 :param apiuser: This is filled automatically from the |authtoken|.
2015 :param apiuser: This is filled automatically from the |authtoken|.
2020 :type apiuser: AuthUser
2016 :type apiuser: AuthUser
2021 :param repoid: The repository name or repository ID.
2017 :param repoid: The repository name or repository ID.
2022 :type repoid: str or int
2018 :type repoid: str or int
2023
2019
2024 Example output:
2020 Example output:
2025
2021
2026 .. code-block:: bash
2022 .. code-block:: bash
2027
2023
2028 id : <id_given_in_input>
2024 id : <id_given_in_input>
2029 result : {
2025 result : {
2030 "msg": "executed maintenance command",
2026 "msg": "executed maintenance command",
2031 "executed_actions": [
2027 "executed_actions": [
2032 <action_message>, <action_message2>...
2028 <action_message>, <action_message2>...
2033 ],
2029 ],
2034 "repository": "<repository name>"
2030 "repository": "<repository name>"
2035 }
2031 }
2036 error : null
2032 error : null
2037
2033
2038 Example error output:
2034 Example error output:
2039
2035
2040 .. code-block:: bash
2036 .. code-block:: bash
2041
2037
2042 id : <id_given_in_input>
2038 id : <id_given_in_input>
2043 result : null
2039 result : null
2044 error : {
2040 error : {
2045 "Unable to execute maintenance on `<reponame>`"
2041 "Unable to execute maintenance on `<reponame>`"
2046 }
2042 }
2047
2043
2048 """
2044 """
2049
2045
2050 repo = get_repo_or_error(repoid)
2046 repo = get_repo_or_error(repoid)
2051 if not has_superadmin_permission(apiuser):
2047 if not has_superadmin_permission(apiuser):
2052 _perms = ('repository.admin',)
2048 _perms = ('repository.admin',)
2053 validate_repo_permissions(apiuser, repoid, repo, _perms)
2049 validate_repo_permissions(apiuser, repoid, repo, _perms)
2054
2050
2055 try:
2051 try:
2056 maintenance = repo_maintenance.RepoMaintenance()
2052 maintenance = repo_maintenance.RepoMaintenance()
2057 executed_actions = maintenance.execute(repo)
2053 executed_actions = maintenance.execute(repo)
2058
2054
2059 return {
2055 return {
2060 'msg': 'executed maintenance command',
2056 'msg': 'executed maintenance command',
2061 'executed_actions': executed_actions,
2057 'executed_actions': executed_actions,
2062 'repository': repo.repo_name
2058 'repository': repo.repo_name
2063 }
2059 }
2064 except Exception:
2060 except Exception:
2065 log.exception("Exception occurred while trying to run maintenance")
2061 log.exception("Exception occurred while trying to run maintenance")
2066 raise JSONRPCError(
2062 raise JSONRPCError(
2067 'Unable to execute maintenance on `%s`' % repo.repo_name)
2063 'Unable to execute maintenance on `%s`' % repo.repo_name)
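A short, hedged illustration of calling the maintenance method above and listing the actions it reports; all concrete values are placeholders.

import requests

payload = {
    'id': 7,
    'auth_token': '<auth_token>',                # placeholder admin token
    'method': 'maintenance',
    'args': {'repoid': 'my-repo'},               # placeholder repository
}
reply = requests.post('https://code.example.com/_admin/api', json=payload).json()
if reply['error'] is None:
    for action in reply['result']['executed_actions']:
        print(action)                            # one message per executed action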
@@ -1,183 +1,181 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import formencode
22 import formencode
23 import formencode.htmlfill
23 import formencode.htmlfill
24
24
25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
26 from pyramid.view import view_config
26 from pyramid.view import view_config
27 from pyramid.renderers import render
27 from pyramid.renderers import render
28 from pyramid.response import Response
28 from pyramid.response import Response
29
29
30 from rhodecode.apps._base import BaseAppView, DataGridAppView
30 from rhodecode.apps._base import BaseAppView, DataGridAppView
31 from rhodecode.lib.celerylib.utils import get_task_id
31
32
32 from rhodecode.lib.ext_json import json
33 from rhodecode.lib.ext_json import json
33 from rhodecode.lib.auth import (
34 from rhodecode.lib.auth import (
34 LoginRequired, CSRFRequired, NotAnonymous,
35 LoginRequired, CSRFRequired, NotAnonymous,
35 HasPermissionAny, HasRepoGroupPermissionAny)
36 HasPermissionAny, HasRepoGroupPermissionAny)
36 from rhodecode.lib import helpers as h
37 from rhodecode.lib import helpers as h
37 from rhodecode.lib.utils import repo_name_slug
38 from rhodecode.lib.utils import repo_name_slug
38 from rhodecode.lib.utils2 import safe_int, safe_unicode
39 from rhodecode.lib.utils2 import safe_int, safe_unicode
39 from rhodecode.model.forms import RepoForm
40 from rhodecode.model.forms import RepoForm
40 from rhodecode.model.repo import RepoModel
41 from rhodecode.model.repo import RepoModel
41 from rhodecode.model.scm import RepoList, RepoGroupList, ScmModel
42 from rhodecode.model.scm import RepoList, RepoGroupList, ScmModel
42 from rhodecode.model.settings import SettingsModel
43 from rhodecode.model.settings import SettingsModel
43 from rhodecode.model.db import Repository, RepoGroup
44 from rhodecode.model.db import Repository, RepoGroup
44
45
45 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
46
47
47
48
48 class AdminReposView(BaseAppView, DataGridAppView):
49 class AdminReposView(BaseAppView, DataGridAppView):
49
50
50 def load_default_context(self):
51 def load_default_context(self):
51 c = self._get_local_tmpl_context()
52 c = self._get_local_tmpl_context()
52
53
53 return c
54 return c
54
55
55 def _load_form_data(self, c):
56 def _load_form_data(self, c):
56 acl_groups = RepoGroupList(RepoGroup.query().all(),
57 acl_groups = RepoGroupList(RepoGroup.query().all(),
57 perm_set=['group.write', 'group.admin'])
58 perm_set=['group.write', 'group.admin'])
58 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
59 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
59 c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups)
60 c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups)
60 c.landing_revs_choices, c.landing_revs = \
61 c.landing_revs_choices, c.landing_revs = \
61 ScmModel().get_repo_landing_revs(self.request.translate)
62 ScmModel().get_repo_landing_revs(self.request.translate)
62 c.personal_repo_group = self._rhodecode_user.personal_repo_group
63 c.personal_repo_group = self._rhodecode_user.personal_repo_group
63
64
64 @LoginRequired()
65 @LoginRequired()
65 @NotAnonymous()
66 @NotAnonymous()
66 # perms check inside
67 # perms check inside
67 @view_config(
68 @view_config(
68 route_name='repos', request_method='GET',
69 route_name='repos', request_method='GET',
69 renderer='rhodecode:templates/admin/repos/repos.mako')
70 renderer='rhodecode:templates/admin/repos/repos.mako')
70 def repository_list(self):
71 def repository_list(self):
71 c = self.load_default_context()
72 c = self.load_default_context()
72
73
73 repo_list = Repository.get_all_repos()
74 repo_list = Repository.get_all_repos()
74 c.repo_list = RepoList(repo_list, perm_set=['repository.admin'])
75 c.repo_list = RepoList(repo_list, perm_set=['repository.admin'])
75 repos_data = RepoModel().get_repos_as_dict(
76 repos_data = RepoModel().get_repos_as_dict(
76 repo_list=c.repo_list, admin=True, super_user_actions=True)
77 repo_list=c.repo_list, admin=True, super_user_actions=True)
77 # json used to render the grid
78 # json used to render the grid
78 c.data = json.dumps(repos_data)
79 c.data = json.dumps(repos_data)
79
80
80 return self._get_template_context(c)
81 return self._get_template_context(c)
81
82
82 @LoginRequired()
83 @LoginRequired()
83 @NotAnonymous()
84 @NotAnonymous()
84 # perms check inside
85 # perms check inside
85 @view_config(
86 @view_config(
86 route_name='repo_new', request_method='GET',
87 route_name='repo_new', request_method='GET',
87 renderer='rhodecode:templates/admin/repos/repo_add.mako')
88 renderer='rhodecode:templates/admin/repos/repo_add.mako')
88 def repository_new(self):
89 def repository_new(self):
89 c = self.load_default_context()
90 c = self.load_default_context()
90
91
91 new_repo = self.request.GET.get('repo', '')
92 new_repo = self.request.GET.get('repo', '')
92 parent_group = safe_int(self.request.GET.get('parent_group'))
93 parent_group = safe_int(self.request.GET.get('parent_group'))
93 _gr = RepoGroup.get(parent_group)
94 _gr = RepoGroup.get(parent_group)
94
95
95 if not HasPermissionAny('hg.admin', 'hg.create.repository')():
96 if not HasPermissionAny('hg.admin', 'hg.create.repository')():
96 # you're not super admin nor have global create permissions,
97 # you're not super admin nor have global create permissions,
97 # but maybe you have at least write permission to a parent group ?
98 # but maybe you have at least write permission to a parent group ?
98
99
99 gr_name = _gr.group_name if _gr else None
100 gr_name = _gr.group_name if _gr else None
100 # check if repository creation on group write access is enabled
101 # check if repository creation on group write access is enabled
101 create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')()
102 create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')()
102 group_admin = HasRepoGroupPermissionAny('group.admin')(group_name=gr_name)
103 group_admin = HasRepoGroupPermissionAny('group.admin')(group_name=gr_name)
103 group_write = HasRepoGroupPermissionAny('group.write')(group_name=gr_name)
104 group_write = HasRepoGroupPermissionAny('group.write')(group_name=gr_name)
104 if not (group_admin or (group_write and create_on_write)):
105 if not (group_admin or (group_write and create_on_write)):
105 raise HTTPForbidden()
106 raise HTTPForbidden()
106
107
107 self._load_form_data(c)
108 self._load_form_data(c)
108 c.new_repo = repo_name_slug(new_repo)
109 c.new_repo = repo_name_slug(new_repo)
109
110
110 # apply the defaults from defaults page
111 # apply the defaults from defaults page
111 defaults = SettingsModel().get_default_repo_settings(strip_prefix=True)
112 defaults = SettingsModel().get_default_repo_settings(strip_prefix=True)
112 # set checkbox to autochecked
113 # set checkbox to autochecked
113 defaults['repo_copy_permissions'] = True
114 defaults['repo_copy_permissions'] = True
114
115
115 parent_group_choice = '-1'
116 parent_group_choice = '-1'
116 if not self._rhodecode_user.is_admin and self._rhodecode_user.personal_repo_group:
117 if not self._rhodecode_user.is_admin and self._rhodecode_user.personal_repo_group:
117 parent_group_choice = self._rhodecode_user.personal_repo_group
118 parent_group_choice = self._rhodecode_user.personal_repo_group
118
119
119 if parent_group and _gr:
120 if parent_group and _gr:
120 if parent_group in [x[0] for x in c.repo_groups]:
121 if parent_group in [x[0] for x in c.repo_groups]:
121 parent_group_choice = safe_unicode(parent_group)
122 parent_group_choice = safe_unicode(parent_group)
122
123
123 defaults.update({'repo_group': parent_group_choice})
124 defaults.update({'repo_group': parent_group_choice})
124
125
125 data = render('rhodecode:templates/admin/repos/repo_add.mako',
126 data = render('rhodecode:templates/admin/repos/repo_add.mako',
126 self._get_template_context(c), self.request)
127 self._get_template_context(c), self.request)
127 html = formencode.htmlfill.render(
128 html = formencode.htmlfill.render(
128 data,
129 data,
129 defaults=defaults,
130 defaults=defaults,
130 encoding="UTF-8",
131 encoding="UTF-8",
131 force_defaults=False
132 force_defaults=False
132 )
133 )
133 return Response(html)
134 return Response(html)
134
135
135 @LoginRequired()
136 @LoginRequired()
136 @NotAnonymous()
137 @NotAnonymous()
137 @CSRFRequired()
138 @CSRFRequired()
138 # perms check inside
139 # perms check inside
139 @view_config(
140 @view_config(
140 route_name='repo_create', request_method='POST',
141 route_name='repo_create', request_method='POST',
141 renderer='rhodecode:templates/admin/repos/repos.mako')
142 renderer='rhodecode:templates/admin/repos/repos.mako')
142 def repository_create(self):
143 def repository_create(self):
143 c = self.load_default_context()
144 c = self.load_default_context()
144
145
145 form_result = {}
146 task_id = None
147 self._load_form_data(c)
146 form_result = {}
147 self._load_form_data(c)
148 task_id = None
148
149 try:
149 try:
150 # the CanWriteToGroup validator checks permissions of this POST
150 # the CanWriteToGroup validator checks permissions of this POST
151 form = RepoForm(
151 form = RepoForm(
152 self.request.translate, repo_groups=c.repo_groups_choices,
152 self.request.translate, repo_groups=c.repo_groups_choices,
153 landing_revs=c.landing_revs_choices)()
153 landing_revs=c.landing_revs_choices)()
154 form_results = form.to_python(dict(self.request.POST))
154 form_result = form.to_python(dict(self.request.POST))
155
155
156 # create is done sometimes async on celery, db transaction
156 # create is done sometimes async on celery, db transaction
157 # management is handled there.
157 # management is handled there.
158 task = RepoModel().create(form_result, self._rhodecode_user.user_id)
158 task = RepoModel().create(form_result, self._rhodecode_user.user_id)
159 from celery.result import BaseAsyncResult
159 task_id = get_task_id(task)
160 if isinstance(task, BaseAsyncResult):
161 task_id = task.task_id
162 except formencode.Invalid as errors:
160 except formencode.Invalid as errors:
163 data = render('rhodecode:templates/admin/repos/repo_add.mako',
161 data = render('rhodecode:templates/admin/repos/repo_add.mako',
164 self._get_template_context(c), self.request)
162 self._get_template_context(c), self.request)
165 html = formencode.htmlfill.render(
163 html = formencode.htmlfill.render(
166 data,
164 data,
167 defaults=errors.value,
165 defaults=errors.value,
168 errors=errors.error_dict or {},
166 errors=errors.error_dict or {},
169 prefix_error=False,
167 prefix_error=False,
170 encoding="UTF-8",
168 encoding="UTF-8",
171 force_defaults=False
169 force_defaults=False
172 )
170 )
173 return Response(html)
171 return Response(html)
174
172
175 except Exception as e:
173 except Exception as e:
176 msg = self._log_creation_exception(e, form_result.get('repo_name'))
174 msg = self._log_creation_exception(e, form_result.get('repo_name'))
177 h.flash(msg, category='error')
175 h.flash(msg, category='error')
178 raise HTTPFound(h.route_path('home'))
176 raise HTTPFound(h.route_path('home'))
179
177
180 raise HTTPFound(
178 raise HTTPFound(
181 h.route_path('repo_creating',
179 h.route_path('repo_creating',
182 repo_name=form_result['repo_name_full'],
180 repo_name=form_result['repo_name_full'],
183 _query=dict(task_id=task_id)))
181 _query=dict(task_id=task_id)))
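The get_task_id helper imported at the top of this file is not shown in this hunk. As an assumption-based sketch only, the behaviour the call site relies on (a task id when RepoModel().create() went through Celery, None when it ran synchronously) could look like the following; the real rhodecode.lib.celerylib.utils implementation may differ.

from celery.result import AsyncResult

def get_task_id(task):
    # If repo creation was dispatched through Celery we get an AsyncResult
    # back; with Celery disabled the call returns a plain result object.
    if isinstance(task, AsyncResult):
        return task.task_id
    return None

Returning None keeps the task_id query argument on the repo_creating redirect harmless when creation ran synchronously.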
@@ -1,110 +1,109 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22
22
23 from pyramid.view import view_config
23 from pyramid.view import view_config
24 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
24 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
25
25
26 from rhodecode.apps._base import BaseAppView
26 from rhodecode.apps._base import BaseAppView
27 from rhodecode.lib import helpers as h
27 from rhodecode.lib import helpers as h
28 from rhodecode.lib.auth import (NotAnonymous, HasRepoPermissionAny)
28 from rhodecode.lib.auth import (NotAnonymous, HasRepoPermissionAny)
29 from rhodecode.model.db import Repository
29 from rhodecode.model.db import Repository
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 class RepoChecksView(BaseAppView):
34 class RepoChecksView(BaseAppView):
35 def load_default_context(self):
35 def load_default_context(self):
36 c = self._get_local_tmpl_context()
36 c = self._get_local_tmpl_context()
37
37
38 return c
38 return c
39
39
40 @NotAnonymous()
40 @NotAnonymous()
41 @view_config(
41 @view_config(
42 route_name='repo_creating', request_method='GET',
42 route_name='repo_creating', request_method='GET',
43 renderer='rhodecode:templates/admin/repos/repo_creating.mako')
43 renderer='rhodecode:templates/admin/repos/repo_creating.mako')
44 def repo_creating(self):
44 def repo_creating(self):
45 c = self.load_default_context()
45 c = self.load_default_context()
46
46
47 repo_name = self.request.matchdict['repo_name']
47 repo_name = self.request.matchdict['repo_name']
48 db_repo = Repository.get_by_repo_name(repo_name)
48 db_repo = Repository.get_by_repo_name(repo_name)
49 if not db_repo:
50 raise HTTPNotFound()
51
49
52 # check if maybe repo is already created
50 # check if maybe repo is already created
53 if db_repo.repo_state in [Repository.STATE_CREATED]:
51 if db_repo and db_repo.repo_state in [Repository.STATE_CREATED]:
54 # re-check permissions before redirecting to prevent resource
52 # re-check permissions before redirecting to prevent resource
55 # discovery by checking the 302 code
53 # discovery by checking the 302 code
56 perm_set = ['repository.read', 'repository.write', 'repository.admin']
54 perm_set = ['repository.read', 'repository.write', 'repository.admin']
57 has_perm = HasRepoPermissionAny(*perm_set)(
55 has_perm = HasRepoPermissionAny(*perm_set)(
58 db_repo.repo_name, 'Repo Creating check')
56 db_repo.repo_name, 'Repo Creating check')
59 if not has_perm:
57 if not has_perm:
60 raise HTTPNotFound()
58 raise HTTPNotFound()
61
59
62 raise HTTPFound(h.route_path(
60 raise HTTPFound(h.route_path(
63 'repo_summary', repo_name=db_repo.repo_name))
61 'repo_summary', repo_name=db_repo.repo_name))
64
62
65 c.task_id = self.request.GET.get('task_id')
63 c.task_id = self.request.GET.get('task_id')
66 c.repo_name = repo_name
64 c.repo_name = repo_name
67
65
68 return self._get_template_context(c)
66 return self._get_template_context(c)
69
67
70 @NotAnonymous()
68 @NotAnonymous()
71 @view_config(
69 @view_config(
72 route_name='repo_creating_check', request_method='GET',
70 route_name='repo_creating_check', request_method='GET',
73 renderer='json_ext')
71 renderer='json_ext')
74 def repo_creating_check(self):
72 def repo_creating_check(self):
75 _ = self.request.translate
73 _ = self.request.translate
76 task_id = self.request.GET.get('task_id')
74 task_id = self.request.GET.get('task_id')
77 self.load_default_context()
75 self.load_default_context()
78
76
79 repo_name = self.request.matchdict['repo_name']
77 repo_name = self.request.matchdict['repo_name']
80
78
81 if task_id and task_id not in ['None']:
79 if task_id and task_id not in ['None']:
82 import rhodecode
80 import rhodecode
83 from celery.result import AsyncResult
81 from rhodecode.lib.celerylib.loader import celery_app
84 if rhodecode.CELERY_ENABLED:
82 if rhodecode.CELERY_ENABLED:
85 task = AsyncResult(task_id)
83 task = celery_app.AsyncResult(task_id)
84 task.get()
86 if task.failed():
85 if task.failed():
87 msg = self._log_creation_exception(task.result, repo_name)
86 msg = self._log_creation_exception(task.result, repo_name)
88 h.flash(msg, category='error')
87 h.flash(msg, category='error')
89 raise HTTPFound(h.route_path('home'), code=501)
88 raise HTTPFound(h.route_path('home'), code=501)
90
89
91 db_repo = Repository.get_by_repo_name(repo_name)
90 db_repo = Repository.get_by_repo_name(repo_name)
92 if db_repo and db_repo.repo_state == Repository.STATE_CREATED:
91 if db_repo and db_repo.repo_state == Repository.STATE_CREATED:
93 if db_repo.clone_uri:
92 if db_repo.clone_uri:
94 clone_uri = db_repo.clone_uri_hidden
93 clone_uri = db_repo.clone_uri_hidden
95 h.flash(_('Created repository %s from %s')
94 h.flash(_('Created repository %s from %s')
96 % (db_repo.repo_name, clone_uri), category='success')
95 % (db_repo.repo_name, clone_uri), category='success')
97 else:
96 else:
98 repo_url = h.link_to(
97 repo_url = h.link_to(
99 db_repo.repo_name,
98 db_repo.repo_name,
100 h.route_path('repo_summary', repo_name=db_repo.repo_name))
99 h.route_path('repo_summary', repo_name=db_repo.repo_name))
101 fork = db_repo.fork
100 fork = db_repo.fork
102 if fork:
101 if fork:
103 fork_name = fork.repo_name
102 fork_name = fork.repo_name
104 h.flash(h.literal(_('Forked repository %s as %s')
103 h.flash(h.literal(_('Forked repository %s as %s')
105 % (fork_name, repo_url)), category='success')
104 % (fork_name, repo_url)), category='success')
106 else:
105 else:
107 h.flash(h.literal(_('Created repository %s') % repo_url),
106 h.flash(h.literal(_('Created repository %s') % repo_url),
108 category='success')
107 category='success')
109 return {'result': True}
108 return {'result': True}
110 return {'result': False}
109 return {'result': False}
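In Celery 4.x a result is best resolved through the configured application so the correct result backend is used, which is what the updated repo_creating_check does via celery_app.AsyncResult(task_id). Below is a standalone sketch of that polling pattern; the broker/backend URLs and the task id are placeholders, and a reachable broker/backend is assumed.

from celery import Celery

celery_app = Celery(broker='redis://localhost:6379/0',   # placeholder broker
                    backend='redis://localhost:6379/1')  # placeholder result backend

def task_status(task_id):
    result = celery_app.AsyncResult(task_id)
    if result.failed():
        return 'failed: %r' % (result.result,)   # result.result holds the exception
    if result.successful():
        return 'done'
    return result.state                          # e.g. PENDING / STARTED

if __name__ == '__main__':
    # Assumes the placeholder Redis instance above is actually running.
    print(task_status('00000000-0000-0000-0000-000000000000'))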
@@ -1,259 +1,259 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import datetime
22 import datetime
23 import formencode
23 import formencode
24 import formencode.htmlfill
24 import formencode.htmlfill
25
25
26 from pyramid.httpexceptions import HTTPFound
26 from pyramid.httpexceptions import HTTPFound
27 from pyramid.view import view_config
27 from pyramid.view import view_config
28 from pyramid.renderers import render
28 from pyramid.renderers import render
29 from pyramid.response import Response
29 from pyramid.response import Response
30
30
31 from rhodecode.apps._base import RepoAppView, DataGridAppView
31 from rhodecode.apps._base import RepoAppView, DataGridAppView
32 from rhodecode.lib.auth import (
32 from rhodecode.lib.auth import (
33 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
33 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
34 HasRepoPermissionAny, HasPermissionAnyDecorator, CSRFRequired)
34 HasRepoPermissionAny, HasPermissionAnyDecorator, CSRFRequired)
35 import rhodecode.lib.helpers as h
35 import rhodecode.lib.helpers as h
36 from rhodecode.lib.celerylib.utils import get_task_id
36 from rhodecode.model.db import coalesce, or_, Repository, RepoGroup
37 from rhodecode.model.db import coalesce, or_, Repository, RepoGroup
37 from rhodecode.model.repo import RepoModel
38 from rhodecode.model.repo import RepoModel
38 from rhodecode.model.forms import RepoForkForm
39 from rhodecode.model.forms import RepoForkForm
39 from rhodecode.model.scm import ScmModel, RepoGroupList
40 from rhodecode.model.scm import ScmModel, RepoGroupList
40 from rhodecode.lib.utils2 import safe_int, safe_unicode
41 from rhodecode.lib.utils2 import safe_int, safe_unicode
41
42
42 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
43
44
44
45
45 class RepoForksView(RepoAppView, DataGridAppView):
46 class RepoForksView(RepoAppView, DataGridAppView):
46
47
47 def load_default_context(self):
48 def load_default_context(self):
48 c = self._get_local_tmpl_context(include_app_defaults=True)
49 c = self._get_local_tmpl_context(include_app_defaults=True)
49 c.rhodecode_repo = self.rhodecode_vcs_repo
50 c.rhodecode_repo = self.rhodecode_vcs_repo
50
51
51 acl_groups = RepoGroupList(
52 acl_groups = RepoGroupList(
52 RepoGroup.query().all(),
53 RepoGroup.query().all(),
53 perm_set=['group.write', 'group.admin'])
54 perm_set=['group.write', 'group.admin'])
54 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
55 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
55 c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups)
56 c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups)
56 choices, c.landing_revs = ScmModel().get_repo_landing_revs(
57 choices, c.landing_revs = ScmModel().get_repo_landing_revs(
57 self.request.translate)
58 self.request.translate)
58 c.landing_revs_choices = choices
59 c.landing_revs_choices = choices
59 c.personal_repo_group = c.rhodecode_user.personal_repo_group
60 c.personal_repo_group = c.rhodecode_user.personal_repo_group
60
61
61 return c
62 return c
62
63
63 @LoginRequired()
64 @LoginRequired()
64 @HasRepoPermissionAnyDecorator(
65 @HasRepoPermissionAnyDecorator(
65 'repository.read', 'repository.write', 'repository.admin')
66 'repository.read', 'repository.write', 'repository.admin')
66 @view_config(
67 @view_config(
67 route_name='repo_forks_show_all', request_method='GET',
68 route_name='repo_forks_show_all', request_method='GET',
68 renderer='rhodecode:templates/forks/forks.mako')
69 renderer='rhodecode:templates/forks/forks.mako')
69 def repo_forks_show_all(self):
70 def repo_forks_show_all(self):
70 c = self.load_default_context()
71 c = self.load_default_context()
71 return self._get_template_context(c)
72 return self._get_template_context(c)
72
73
73 @LoginRequired()
74 @LoginRequired()
74 @HasRepoPermissionAnyDecorator(
75 @HasRepoPermissionAnyDecorator(
75 'repository.read', 'repository.write', 'repository.admin')
76 'repository.read', 'repository.write', 'repository.admin')
76 @view_config(
77 @view_config(
77 route_name='repo_forks_data', request_method='GET',
78 route_name='repo_forks_data', request_method='GET',
78 renderer='json_ext', xhr=True)
79 renderer='json_ext', xhr=True)
79 def repo_forks_data(self):
80 def repo_forks_data(self):
80 _ = self.request.translate
81 _ = self.request.translate
81 self.load_default_context()
82 self.load_default_context()
82 column_map = {
83 column_map = {
83 'fork_name': 'repo_name',
84 'fork_name': 'repo_name',
84 'fork_date': 'created_on',
85 'fork_date': 'created_on',
85 'last_activity': 'updated_on'
86 'last_activity': 'updated_on'
86 }
87 }
87 draw, start, limit = self._extract_chunk(self.request)
88 draw, start, limit = self._extract_chunk(self.request)
88 search_q, order_by, order_dir = self._extract_ordering(
89 search_q, order_by, order_dir = self._extract_ordering(
89 self.request, column_map=column_map)
90 self.request, column_map=column_map)
90
91
91 acl_check = HasRepoPermissionAny(
92 acl_check = HasRepoPermissionAny(
92 'repository.read', 'repository.write', 'repository.admin')
93 'repository.read', 'repository.write', 'repository.admin')
93 repo_id = self.db_repo.repo_id
94 repo_id = self.db_repo.repo_id
94 allowed_ids = [-1]
95 allowed_ids = [-1]
95 for f in Repository.query().filter(Repository.fork_id == repo_id):
96 for f in Repository.query().filter(Repository.fork_id == repo_id):
96 if acl_check(f.repo_name, 'get forks check'):
97 if acl_check(f.repo_name, 'get forks check'):
97 allowed_ids.append(f.repo_id)
98 allowed_ids.append(f.repo_id)
98
99
99 forks_data_total_count = Repository.query()\
100 forks_data_total_count = Repository.query()\
100 .filter(Repository.fork_id == repo_id)\
101 .filter(Repository.fork_id == repo_id)\
101 .filter(Repository.repo_id.in_(allowed_ids))\
102 .filter(Repository.repo_id.in_(allowed_ids))\
102 .count()
103 .count()
103
104
104 # json generate
105 # json generate
105 base_q = Repository.query()\
106 base_q = Repository.query()\
106 .filter(Repository.fork_id == repo_id)\
107 .filter(Repository.fork_id == repo_id)\
107 .filter(Repository.repo_id.in_(allowed_ids))\
108 .filter(Repository.repo_id.in_(allowed_ids))\
108
109
109 if search_q:
110 if search_q:
110 like_expression = u'%{}%'.format(safe_unicode(search_q))
111 like_expression = u'%{}%'.format(safe_unicode(search_q))
111 base_q = base_q.filter(or_(
112 base_q = base_q.filter(or_(
112 Repository.repo_name.ilike(like_expression),
113 Repository.repo_name.ilike(like_expression),
113 Repository.description.ilike(like_expression),
114 Repository.description.ilike(like_expression),
114 ))
115 ))
115
116
116 forks_data_total_filtered_count = base_q.count()
117 forks_data_total_filtered_count = base_q.count()
117
118
118 sort_col = getattr(Repository, order_by, None)
119 sort_col = getattr(Repository, order_by, None)
119 if sort_col:
120 if sort_col:
120 if order_dir == 'asc':
121 if order_dir == 'asc':
121 # handle null values properly to order by NULL last
122 # handle null values properly to order by NULL last
122 if order_by in ['last_activity']:
123 if order_by in ['last_activity']:
123 sort_col = coalesce(sort_col, datetime.date.max)
124 sort_col = coalesce(sort_col, datetime.date.max)
124 sort_col = sort_col.asc()
125 sort_col = sort_col.asc()
125 else:
126 else:
126 # handle null values properly to order by NULL last
127 # handle null values properly to order by NULL last
127 if order_by in ['last_activity']:
128 if order_by in ['last_activity']:
128 sort_col = coalesce(sort_col, datetime.date.min)
129 sort_col = coalesce(sort_col, datetime.date.min)
129 sort_col = sort_col.desc()
130 sort_col = sort_col.desc()
130
131
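# (ordering note: ``coalesce(updated_on, date.max)`` before ``.asc()`` and
# ``coalesce(updated_on, date.min)`` before ``.desc()`` both push rows with a
# NULL ``updated_on`` to the end of the result set)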
131 base_q = base_q.order_by(sort_col)
132 base_q = base_q.order_by(sort_col)
132 base_q = base_q.offset(start).limit(limit)
133 base_q = base_q.offset(start).limit(limit)
133
134
134 fork_list = base_q.all()
135 fork_list = base_q.all()
135
136
136 def fork_actions(fork):
137 def fork_actions(fork):
137 url_link = h.route_path(
138 url_link = h.route_path(
138 'repo_compare',
139 'repo_compare',
139 repo_name=fork.repo_name,
140 repo_name=fork.repo_name,
140 source_ref_type=self.db_repo.landing_rev[0],
141 source_ref_type=self.db_repo.landing_rev[0],
141 source_ref=self.db_repo.landing_rev[1],
142 source_ref=self.db_repo.landing_rev[1],
142 target_ref_type=self.db_repo.landing_rev[0],
143 target_ref_type=self.db_repo.landing_rev[0],
143 target_ref=self.db_repo.landing_rev[1],
144 target_ref=self.db_repo.landing_rev[1],
144 _query=dict(merge=1, target_repo=fork.repo_name))
145 _query=dict(merge=1, target_repo=fork.repo_name))
145 return h.link_to(_('Compare fork'), url_link, class_='btn-link')
146 return h.link_to(_('Compare fork'), url_link, class_='btn-link')
146
147
147 def fork_name(fork):
148 def fork_name(fork):
148 return h.link_to(fork.repo_name,
149 return h.link_to(fork.repo_name,
149 h.route_path('repo_summary', repo_name=fork.repo_name))
150 h.route_path('repo_summary', repo_name=fork.repo_name))
150
151
151 forks_data = []
152 forks_data = []
152 for fork in fork_list:
153 for fork in fork_list:
153 forks_data.append({
154 forks_data.append({
154 "username": h.gravatar_with_user(self.request, fork.user.username),
155 "username": h.gravatar_with_user(self.request, fork.user.username),
155 "fork_name": fork_name(fork),
156 "fork_name": fork_name(fork),
156 "description": fork.description,
157 "description": fork.description,
157 "fork_date": h.age_component(fork.created_on, time_is_local=True),
158 "fork_date": h.age_component(fork.created_on, time_is_local=True),
158 "last_activity": h.format_date(fork.updated_on),
159 "last_activity": h.format_date(fork.updated_on),
159 "action": fork_actions(fork),
160 "action": fork_actions(fork),
160 })
161 })
161
162
162 data = ({
163 data = ({
163 'draw': draw,
164 'draw': draw,
164 'data': forks_data,
165 'data': forks_data,
165 'recordsTotal': forks_data_total_count,
166 'recordsTotal': forks_data_total_count,
166 'recordsFiltered': forks_data_total_filtered_count,
167 'recordsFiltered': forks_data_total_filtered_count,
167 })
168 })
168
169
169 return data
170 return data
170
171
171 @LoginRequired()
172 @LoginRequired()
172 @NotAnonymous()
173 @NotAnonymous()
173 @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
174 @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
174 @HasRepoPermissionAnyDecorator(
175 @HasRepoPermissionAnyDecorator(
175 'repository.read', 'repository.write', 'repository.admin')
176 'repository.read', 'repository.write', 'repository.admin')
176 @view_config(
177 @view_config(
177 route_name='repo_fork_new', request_method='GET',
178 route_name='repo_fork_new', request_method='GET',
178 renderer='rhodecode:templates/forks/forks.mako')
179 renderer='rhodecode:templates/forks/forks.mako')
179 def repo_fork_new(self):
180 def repo_fork_new(self):
180 c = self.load_default_context()
181 c = self.load_default_context()
181
182
182 defaults = RepoModel()._get_defaults(self.db_repo_name)
183 defaults = RepoModel()._get_defaults(self.db_repo_name)
183 # alter the description to indicate a fork
184 # alter the description to indicate a fork
184 defaults['description'] = (
185 defaults['description'] = (
185 'fork of repository: %s \n%s' % (
186 'fork of repository: %s \n%s' % (
186 defaults['repo_name'], defaults['description']))
187 defaults['repo_name'], defaults['description']))
187 # add suffix to fork
188 # add suffix to fork
188 defaults['repo_name'] = '%s-fork' % defaults['repo_name']
189 defaults['repo_name'] = '%s-fork' % defaults['repo_name']
189
190
190 data = render('rhodecode:templates/forks/fork.mako',
191 data = render('rhodecode:templates/forks/fork.mako',
191 self._get_template_context(c), self.request)
192 self._get_template_context(c), self.request)
192 html = formencode.htmlfill.render(
193 html = formencode.htmlfill.render(
193 data,
194 data,
194 defaults=defaults,
195 defaults=defaults,
195 encoding="UTF-8",
196 encoding="UTF-8",
196 force_defaults=False
197 force_defaults=False
197 )
198 )
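# (``htmlfill.render`` injects ``defaults`` into the already rendered form HTML,
# so the mako template itself does not need to know about the prefilled values)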
198 return Response(html)
199 return Response(html)
199
200
200 @LoginRequired()
201 @LoginRequired()
201 @NotAnonymous()
202 @NotAnonymous()
202 @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
203 @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
203 @HasRepoPermissionAnyDecorator(
204 @HasRepoPermissionAnyDecorator(
204 'repository.read', 'repository.write', 'repository.admin')
205 'repository.read', 'repository.write', 'repository.admin')
205 @CSRFRequired()
206 @CSRFRequired()
206 @view_config(
207 @view_config(
207 route_name='repo_fork_create', request_method='POST',
208 route_name='repo_fork_create', request_method='POST',
208 renderer='rhodecode:templates/forks/fork.mako')
209 renderer='rhodecode:templates/forks/fork.mako')
209 def repo_fork_create(self):
210 def repo_fork_create(self):
210 _ = self.request.translate
211 _ = self.request.translate
211 c = self.load_default_context()
212 c = self.load_default_context()
212
213
213 _form = RepoForkForm(self.request.translate, old_data={'repo_type': self.db_repo.repo_type},
214 _form = RepoForkForm(self.request.translate, old_data={'repo_type': self.db_repo.repo_type},
214 repo_groups=c.repo_groups_choices,
215 repo_groups=c.repo_groups_choices,
215 landing_revs=c.landing_revs_choices)()
216 landing_revs=c.landing_revs_choices)()
216 post_data = dict(self.request.POST)
217 post_data = dict(self.request.POST)
217
218
218 # forbid injecting another repo by forging the request
219 # forbid injecting another repo by forging the request
219 post_data['fork_parent_id'] = self.db_repo.repo_id
220 post_data['fork_parent_id'] = self.db_repo.repo_id
220
221
221 form_result = {}
222 form_result = {}
222 task_id = None
223 task_id = None
223 try:
224 try:
224 form_result = _form.to_python(post_data)
225 form_result = _form.to_python(post_data)
225 # fork creation sometimes runs asynchronously on celery; db transaction
226 # fork creation sometimes runs asynchronously on celery; db transaction
226 # management is handled there.
227 # management is handled there.
227 task = RepoModel().create_fork(
228 task = RepoModel().create_fork(
228 form_result, c.rhodecode_user.user_id)
229 form_result, c.rhodecode_user.user_id)
229 from celery.result import BaseAsyncResult
230
230 if isinstance(task, BaseAsyncResult):
231 task_id = get_task_id(task)
231 task_id = task.task_id
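# (celery 4.x dropped the old ``BaseAsyncResult`` alias, so instead of an
# isinstance() check here the view now asks ``get_task_id()`` from
# rhodecode.lib.celerylib for the id of the async result, if any)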
232 except formencode.Invalid as errors:
232 except formencode.Invalid as errors:
233 c.rhodecode_db_repo = self.db_repo
233 c.rhodecode_db_repo = self.db_repo
234
234
235 data = render('rhodecode:templates/forks/fork.mako',
235 data = render('rhodecode:templates/forks/fork.mako',
236 self._get_template_context(c), self.request)
236 self._get_template_context(c), self.request)
237 html = formencode.htmlfill.render(
237 html = formencode.htmlfill.render(
238 data,
238 data,
239 defaults=errors.value,
239 defaults=errors.value,
240 errors=errors.error_dict or {},
240 errors=errors.error_dict or {},
241 prefix_error=False,
241 prefix_error=False,
242 encoding="UTF-8",
242 encoding="UTF-8",
243 force_defaults=False
243 force_defaults=False
244 )
244 )
245 return Response(html)
245 return Response(html)
246 except Exception:
246 except Exception:
247 log.exception(
247 log.exception(
248 u'Exception while trying to fork the repository %s',
248 u'Exception while trying to fork the repository %s',
249 self.db_repo_name)
249 self.db_repo_name)
250 msg = (
250 msg = (
251 _('An error occurred during repository forking %s') % (
251 _('An error occurred during repository forking %s') % (
252 self.db_repo_name, ))
252 self.db_repo_name, ))
253 h.flash(msg, category='error')
253 h.flash(msg, category='error')
254
254
255 repo_name = form_result.get('repo_name_full', self.db_repo_name)
255 repo_name = form_result.get('repo_name_full', self.db_repo_name)
256 raise HTTPFound(
256 raise HTTPFound(
257 h.route_path('repo_creating',
257 h.route_path('repo_creating',
258 repo_name=repo_name,
258 repo_name=repo_name,
259 _query=dict(task_id=task_id)))
259 _query=dict(task_id=task_id)))
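# The task_id extracted above is handed to the ``repo_creating`` page through the
# query string. A minimal sketch of the two helpers this flow relies on, assuming
# only the public ``celery.result.AsyncResult`` API -- the shipped
# rhodecode.lib.celerylib helpers may differ:

from celery.result import AsyncResult


def get_task_id(task):
    # return the celery task id when an async result was returned, otherwise None
    if isinstance(task, AsyncResult):
        return task.task_id
    return None


def task_finished(task_id):
    # True once the background task has completed (successfully or not); a missing
    # task_id means the fork was created synchronously (eager mode), so it is done
    if not task_id:
        return True
    return AsyncResult(task_id).ready()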
@@ -1,106 +1,90 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import os
22 import os
23 import logging
23 import logging
24 import rhodecode
24 import rhodecode
25
25
26 # ------------------------------------------------------------------------------
27 # CELERY magic until refactor - issue #4163 - import order matters here:
28 #from rhodecode.lib import celerypylons # this must be first, celerypylons
29 # sets config settings upon import
30
31 import rhodecode.integrations # any modules using celery task
32 # decorators should be added afterwards:
33 # ------------------------------------------------------------------------------
34
26
35 from rhodecode.config import utils
27 from rhodecode.config import utils
36
28
37 from rhodecode.lib.utils import load_rcextensions
29 from rhodecode.lib.utils import load_rcextensions
38 from rhodecode.lib.utils2 import str2bool
30 from rhodecode.lib.utils2 import str2bool
39 from rhodecode.lib.vcs import connect_vcs, start_vcs_server
31 from rhodecode.lib.vcs import connect_vcs, start_vcs_server
40
32
41 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
42
34
43
35
44 def load_pyramid_environment(global_config, settings):
36 def load_pyramid_environment(global_config, settings):
45 # Some parts of the code expect a merge of global and app settings.
37 # Some parts of the code expect a merge of global and app settings.
46 settings_merged = global_config.copy()
38 settings_merged = global_config.copy()
47 settings_merged.update(settings)
39 settings_merged.update(settings)
48
40
49 # TODO(marcink): probably not required anymore
41 # TODO(marcink): probably not required anymore
50 # configure channelstream,
42 # configure channelstream,
51 settings_merged['channelstream_config'] = {
43 settings_merged['channelstream_config'] = {
52 'enabled': str2bool(settings_merged.get('channelstream.enabled', False)),
44 'enabled': str2bool(settings_merged.get('channelstream.enabled', False)),
53 'server': settings_merged.get('channelstream.server'),
45 'server': settings_merged.get('channelstream.server'),
54 'secret': settings_merged.get('channelstream.secret')
46 'secret': settings_merged.get('channelstream.secret')
55 }
47 }
56
48
57
58 # TODO(marcink): celery
59 # # store some globals into rhodecode
60 # rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery'))
61 # rhodecode.CELERY_EAGER = str2bool(
62 # config['app_conf'].get('celery.always.eager'))
63
64
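# (the CELERY_ENABLED / CELERY_EAGER globals removed above are presumably set up
# by the new ``config.configure_celery(...)`` directive wired into the pyramid app
# factory, so the environment loader no longer reads celery flags itself)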
65 # If this is a test run we prepare the test environment like
49 # If this is a test run we prepare the test environment like
66 # creating a test database, test search index and test repositories.
50 # creating a test database, test search index and test repositories.
67 # This has to be done before the database connection is initialized.
51 # This has to be done before the database connection is initialized.
68 if settings['is_test']:
52 if settings['is_test']:
69 rhodecode.is_test = True
53 rhodecode.is_test = True
70 rhodecode.disable_error_handler = True
54 rhodecode.disable_error_handler = True
71
55
72 utils.initialize_test_environment(settings_merged)
56 utils.initialize_test_environment(settings_merged)
73
57
74 # Initialize the database connection.
58 # Initialize the database connection.
75 utils.initialize_database(settings_merged)
59 utils.initialize_database(settings_merged)
76
60
77 load_rcextensions(root_path=settings_merged['here'])
61 load_rcextensions(root_path=settings_merged['here'])
78
62
79 # Limit backends to `vcs.backends` from configuration
63 # Limit backends to `vcs.backends` from configuration
80 for alias in rhodecode.BACKENDS.keys():
64 for alias in rhodecode.BACKENDS.keys():
81 if alias not in settings['vcs.backends']:
65 if alias not in settings['vcs.backends']:
82 del rhodecode.BACKENDS[alias]
66 del rhodecode.BACKENDS[alias]
83 log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys())
67 log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys())
84
68
85 # initialize vcs client and optionally run the server if enabled
69 # initialize vcs client and optionally run the server if enabled
86 vcs_server_uri = settings['vcs.server']
70 vcs_server_uri = settings['vcs.server']
87 vcs_server_enabled = settings['vcs.server.enable']
71 vcs_server_enabled = settings['vcs.server.enable']
88 start_server = (
72 start_server = (
89 settings['vcs.start_server'] and
73 settings['vcs.start_server'] and
90 not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0')))
74 not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0')))
91
75
92 if vcs_server_enabled and start_server:
76 if vcs_server_enabled and start_server:
93 log.info("Starting vcsserver")
77 log.info("Starting vcsserver")
94 start_vcs_server(server_and_port=vcs_server_uri,
78 start_vcs_server(server_and_port=vcs_server_uri,
95 protocol=utils.get_vcs_server_protocol(settings),
79 protocol=utils.get_vcs_server_protocol(settings),
96 log_level=settings['vcs.server.log_level'])
80 log_level=settings['vcs.server.log_level'])
97
81
98 utils.configure_vcs(settings)
82 utils.configure_vcs(settings)
99
83
100 # Store the settings to make them available to other modules.
84 # Store the settings to make them available to other modules.
101
85
102 rhodecode.PYRAMID_SETTINGS = settings_merged
86 rhodecode.PYRAMID_SETTINGS = settings_merged
103 rhodecode.CONFIG = settings_merged
87 rhodecode.CONFIG = settings_merged
104
88
105 if vcs_server_enabled:
89 if vcs_server_enabled:
106 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
90 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
@@ -1,379 +1,379 b''
1 {
1 {
2 "libnghttp2-1.7.1": {
2 "libnghttp2-1.7.1": {
3 "MIT License": "http://spdx.org/licenses/MIT"
3 "MIT License": "http://spdx.org/licenses/MIT"
4 },
4 },
5 "nodejs-4.3.1": {
5 "nodejs-4.3.1": {
6 "MIT License": "http://spdx.org/licenses/MIT"
6 "MIT License": "http://spdx.org/licenses/MIT"
7 },
7 },
8 "python-2.7.12": {
8 "python-2.7.12": {
9 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
9 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
10 },
10 },
11 "python2.7-Babel-1.3": {
11 "python2.7-Babel-1.3": {
12 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
12 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
13 },
13 },
14 "python2.7-Beaker-1.7.0": {
14 "python2.7-Beaker-1.7.0": {
15 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
15 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
16 },
16 },
17 "python2.7-Chameleon-2.24": {
17 "python2.7-Chameleon-2.24": {
18 "BSD-like": "http://repoze.org/license.html"
18 "BSD-like": "http://repoze.org/license.html"
19 },
19 },
20 "python2.7-FormEncode-1.2.4": {
20 "python2.7-FormEncode-1.2.4": {
21 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
21 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
22 },
22 },
23 "python2.7-Jinja2-2.7.3": {
23 "python2.7-Jinja2-2.7.3": {
24 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
24 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
25 },
25 },
26 "python2.7-Mako-1.0.6": {
26 "python2.7-Mako-1.0.6": {
27 "MIT License": "http://spdx.org/licenses/MIT"
27 "MIT License": "http://spdx.org/licenses/MIT"
28 },
28 },
29 "python2.7-Markdown-2.6.7": {
29 "python2.7-Markdown-2.6.7": {
30 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
30 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
31 },
31 },
32 "python2.7-MarkupSafe-0.23": {
32 "python2.7-MarkupSafe-0.23": {
33 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
33 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
34 },
34 },
35 "python2.7-Paste-2.0.3": {
35 "python2.7-Paste-2.0.3": {
36 "MIT License": "http://spdx.org/licenses/MIT"
36 "MIT License": "http://spdx.org/licenses/MIT"
37 },
37 },
38 "python2.7-PasteDeploy-1.5.2": {
38 "python2.7-PasteDeploy-1.5.2": {
39 "MIT License": "http://spdx.org/licenses/MIT"
39 "MIT License": "http://spdx.org/licenses/MIT"
40 },
40 },
41 "python2.7-PasteScript-1.7.5": {
41 "python2.7-PasteScript-1.7.5": {
42 "MIT License": "http://spdx.org/licenses/MIT"
42 "MIT License": "http://spdx.org/licenses/MIT"
43 },
43 },
44 "python2.7-Pygments-2.2.0": {
44 "python2.7-Pygments-2.2.0": {
45 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
45 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
46 },
46 },
47 "python2.7-Routes-1.13": {
47 "python2.7-Routes-1.13": {
48 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
48 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
49 },
49 },
50 "python2.7-SQLAlchemy-0.9.9": {
50 "python2.7-SQLAlchemy-0.9.9": {
51 "MIT License": "http://spdx.org/licenses/MIT"
51 "MIT License": "http://spdx.org/licenses/MIT"
52 },
52 },
53 "python2.7-Tempita-0.5.2": {
53 "python2.7-Tempita-0.5.2": {
54 "MIT License": "http://spdx.org/licenses/MIT"
54 "MIT License": "http://spdx.org/licenses/MIT"
55 },
55 },
56 "python2.7-URLObject-2.4.0": {
56 "python2.7-URLObject-2.4.0": {
57 "The Unlicense": "http://unlicense.org/"
57 "The Unlicense": "http://unlicense.org/"
58 },
58 },
59 "python2.7-WebError-0.10.3": {
59 "python2.7-WebError-0.10.3": {
60 "MIT License": "http://spdx.org/licenses/MIT"
60 "MIT License": "http://spdx.org/licenses/MIT"
61 },
61 },
62 "python2.7-WebHelpers-1.3": {
62 "python2.7-WebHelpers-1.3": {
63 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
63 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
64 },
64 },
65 "python2.7-WebHelpers2-2.0": {
65 "python2.7-WebHelpers2-2.0": {
66 "MIT License": "http://spdx.org/licenses/MIT"
66 "MIT License": "http://spdx.org/licenses/MIT"
67 },
67 },
68 "python2.7-WebOb-1.3.1": {
68 "python2.7-WebOb-1.3.1": {
69 "MIT License": "http://spdx.org/licenses/MIT"
69 "MIT License": "http://spdx.org/licenses/MIT"
70 },
70 },
71 "python2.7-Whoosh-2.7.4": {
71 "python2.7-Whoosh-2.7.4": {
72 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause",
72 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause",
73 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
73 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
74 },
74 },
75 "python2.7-alembic-0.8.4": {
75 "python2.7-alembic-0.8.4": {
76 "MIT License": "http://spdx.org/licenses/MIT"
76 "MIT License": "http://spdx.org/licenses/MIT"
77 },
77 },
78 "python2.7-amqplib-1.0.2": {
78 "python2.7-amqplib-1.0.2": {
79 "GNU Lesser General Public License v3.0 only": "http://spdx.org/licenses/LGPL-3.0"
79 "GNU Lesser General Public License v3.0 only": "http://spdx.org/licenses/LGPL-3.0"
80 },
80 },
81 "python2.7-appenlight-client-0.6.14": {
81 "python2.7-appenlight-client-0.6.14": {
82 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
82 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
83 },
83 },
84 "python2.7-authomatic-0.1.0.post1": {
84 "python2.7-authomatic-0.1.0.post1": {
85 "MIT License": "http://spdx.org/licenses/MIT"
85 "MIT License": "http://spdx.org/licenses/MIT"
86 },
86 },
87 "python2.7-backports.shutil-get-terminal-size-1.0.0": {
87 "python2.7-backports.shutil-get-terminal-size-1.0.0": {
88 "MIT License": "http://spdx.org/licenses/MIT"
88 "MIT License": "http://spdx.org/licenses/MIT"
89 },
89 },
90 "python2.7-bleach-1.5.0": {
90 "python2.7-bleach-1.5.0": {
91 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
91 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
92 },
92 },
93 "python2.7-celery-2.2.10": {
93 "python2.7-celery-2.2.10": {
94 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
94 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
95 },
95 },
96 "python2.7-channelstream-0.5.2": {
96 "python2.7-channelstream-0.5.2": {
97 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
97 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
98 },
98 },
99 "python2.7-click-5.1": {
99 "python2.7-click-5.1": {
100 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
100 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
101 },
101 },
102 "python2.7-colander-1.2": {
102 "python2.7-colander-1.2": {
103 "Repoze License": "http://www.repoze.org/LICENSE.txt"
103 "Repoze License": "http://www.repoze.org/LICENSE.txt"
104 },
104 },
105 "python2.7-configobj-5.0.6": {
105 "python2.7-configobj-5.0.6": {
106 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
106 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
107 },
107 },
108 "python2.7-configparser-3.5.0": {
108 "python2.7-configparser-3.5.0": {
109 "MIT License": "http://spdx.org/licenses/MIT"
109 "MIT License": "http://spdx.org/licenses/MIT"
110 },
110 },
111 "python2.7-cssselect-1.0.1": {
111 "python2.7-cssselect-1.0.1": {
112 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
112 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
113 },
113 },
114 "python2.7-decorator-4.0.11": {
114 "python2.7-decorator-4.0.11": {
115 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
115 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
116 },
116 },
117 "python2.7-deform-2.0a2": {
117 "python2.7-deform-2.0a2": {
118 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
118 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
119 },
119 },
120 "python2.7-docutils-0.12": {
120 "python2.7-docutils-0.12": {
121 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause"
121 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause"
122 },
122 },
123 "python2.7-dogpile.cache-0.6.1": {
123 "python2.7-dogpile.cache-0.6.1": {
124 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
124 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
125 },
125 },
126 "python2.7-dogpile.core-0.4.1": {
126 "python2.7-dogpile.core-0.4.1": {
127 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
127 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
128 },
128 },
129 "python2.7-elasticsearch-2.3.0": {
129 "python2.7-elasticsearch-2.3.0": {
130 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
130 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
131 },
131 },
132 "python2.7-elasticsearch-dsl-2.2.0": {
132 "python2.7-elasticsearch-dsl-2.2.0": {
133 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
133 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
134 },
134 },
135 "python2.7-entrypoints-0.2.2": {
135 "python2.7-entrypoints-0.2.2": {
136 "MIT License": "http://spdx.org/licenses/MIT"
136 "MIT License": "http://spdx.org/licenses/MIT"
137 },
137 },
138 "python2.7-enum34-1.1.6": {
138 "python2.7-enum34-1.1.6": {
139 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
139 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
140 },
140 },
141 "python2.7-functools32-3.2.3.post2": {
141 "python2.7-functools32-3.2.3.post2": {
142 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
142 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
143 },
143 },
144 "python2.7-future-0.14.3": {
144 "python2.7-future-0.14.3": {
145 "MIT License": "http://spdx.org/licenses/MIT"
145 "MIT License": "http://spdx.org/licenses/MIT"
146 },
146 },
147 "python2.7-futures-3.0.2": {
147 "python2.7-futures-3.0.2": {
148 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
148 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
149 },
149 },
150 "python2.7-gevent-1.1.2": {
150 "python2.7-gevent-1.1.2": {
151 "MIT License": "http://spdx.org/licenses/MIT"
151 "MIT License": "http://spdx.org/licenses/MIT"
152 },
152 },
153 "python2.7-gnureadline-6.3.3": {
153 "python2.7-gnureadline-6.3.3": {
154 "GNU General Public License v1.0 only": "http://spdx.org/licenses/GPL-1.0"
154 "GNU General Public License v1.0 only": "http://spdx.org/licenses/GPL-1.0"
155 },
155 },
156 "python2.7-gprof2dot-2016.10.13": {
156 "python2.7-gprof2dot-2016.10.13": {
157 "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+"
157 "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+"
158 },
158 },
159 "python2.7-greenlet-0.4.10": {
159 "python2.7-greenlet-0.4.10": {
160 "MIT License": "http://spdx.org/licenses/MIT"
160 "MIT License": "http://spdx.org/licenses/MIT"
161 },
161 },
162 "python2.7-gunicorn-19.6.0": {
162 "python2.7-gunicorn-19.6.0": {
163 "MIT License": "http://spdx.org/licenses/MIT"
163 "MIT License": "http://spdx.org/licenses/MIT"
164 },
164 },
165 "python2.7-html5lib-0.9999999": {
165 "python2.7-html5lib-0.9999999": {
166 "MIT License": "http://spdx.org/licenses/MIT"
166 "MIT License": "http://spdx.org/licenses/MIT"
167 },
167 },
168 "python2.7-infrae.cache-1.0.1": {
168 "python2.7-infrae.cache-1.0.1": {
169 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
169 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
170 },
170 },
171 "python2.7-ipython-5.1.0": {
171 "python2.7-ipython-5.1.0": {
172 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
172 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
173 },
173 },
174 "python2.7-ipython-genutils-0.2.0": {
174 "python2.7-ipython-genutils-0.2.0": {
175 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
175 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
176 },
176 },
177 "python2.7-iso8601-0.1.11": {
177 "python2.7-iso8601-0.1.11": {
178 "MIT License": "http://spdx.org/licenses/MIT"
178 "MIT License": "http://spdx.org/licenses/MIT"
179 },
179 },
180 "python2.7-itsdangerous-0.24": {
180 "python2.7-itsdangerous-0.24": {
181 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
181 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
182 },
182 },
183 "python2.7-jsonschema-2.6.0": {
183 "python2.7-jsonschema-2.6.0": {
184 "MIT License": "http://spdx.org/licenses/MIT"
184 "MIT License": "http://spdx.org/licenses/MIT"
185 },
185 },
186 "python2.7-jupyter-client-5.0.0": {
186 "python2.7-jupyter-client-5.0.0": {
187 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
187 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
188 },
188 },
189 "python2.7-jupyter-core-4.3.0": {
189 "python2.7-jupyter-core-4.3.0": {
190 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
190 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
191 },
191 },
192 "python2.7-kombu-1.5.1": {
192 "python2.7-kombu-4.1.0": {
193 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
193 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
194 },
194 },
195 "python2.7-mistune-0.7.4": {
195 "python2.7-mistune-0.7.4": {
196 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
196 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
197 },
197 },
198 "python2.7-msgpack-python-0.4.8": {
198 "python2.7-msgpack-python-0.4.8": {
199 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
199 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
200 },
200 },
201 "python2.7-nbconvert-5.1.1": {
201 "python2.7-nbconvert-5.1.1": {
202 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
202 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
203 },
203 },
204 "python2.7-nbformat-4.3.0": {
204 "python2.7-nbformat-4.3.0": {
205 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
205 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
206 },
206 },
207 "python2.7-packaging-15.2": {
207 "python2.7-packaging-15.2": {
208 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
208 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
209 },
209 },
210 "python2.7-pandocfilters-1.4.1": {
210 "python2.7-pandocfilters-1.4.1": {
211 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
211 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
212 },
212 },
213 "python2.7-pathlib2-2.1.0": {
213 "python2.7-pathlib2-2.1.0": {
214 "MIT License": "http://spdx.org/licenses/MIT"
214 "MIT License": "http://spdx.org/licenses/MIT"
215 },
215 },
216 "python2.7-peppercorn-0.5": {
216 "python2.7-peppercorn-0.5": {
217 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
217 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
218 },
218 },
219 "python2.7-pexpect-4.2.1": {
219 "python2.7-pexpect-4.2.1": {
220 "ISC License": "http://spdx.org/licenses/ISC"
220 "ISC License": "http://spdx.org/licenses/ISC"
221 },
221 },
222 "python2.7-pickleshare-0.7.4": {
222 "python2.7-pickleshare-0.7.4": {
223 "MIT License": "http://spdx.org/licenses/MIT"
223 "MIT License": "http://spdx.org/licenses/MIT"
224 },
224 },
225 "python2.7-prompt-toolkit-1.0.14": {
225 "python2.7-prompt-toolkit-1.0.14": {
226 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
226 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
227 },
227 },
228 "python2.7-psutil-4.3.1": {
228 "python2.7-psutil-4.3.1": {
229 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
229 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
230 },
230 },
231 "python2.7-psycopg2-2.6.1": {
231 "python2.7-psycopg2-2.6.1": {
232 "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+"
232 "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+"
233 },
233 },
234 "python2.7-ptyprocess-0.5.1": {
234 "python2.7-ptyprocess-0.5.1": {
235 "ISC License": "http://opensource.org/licenses/ISC"
235 "ISC License": "http://opensource.org/licenses/ISC"
236 },
236 },
237 "python2.7-py-1.4.31": {
237 "python2.7-py-1.4.31": {
238 "MIT License": "http://spdx.org/licenses/MIT"
238 "MIT License": "http://spdx.org/licenses/MIT"
239 },
239 },
240 "python2.7-py-bcrypt-0.4": {
240 "python2.7-py-bcrypt-0.4": {
241 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
241 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
242 },
242 },
243 "python2.7-py-gfm-0.1.3.rhodecode-upstream1": {
243 "python2.7-py-gfm-0.1.3.rhodecode-upstream1": {
244 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
244 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
245 },
245 },
246 "python2.7-pycrypto-2.6.1": {
246 "python2.7-pycrypto-2.6.1": {
247 "Public Domain": null
247 "Public Domain": null
248 },
248 },
249 "python2.7-pycurl-7.19.5": {
249 "python2.7-pycurl-7.19.5": {
250 "MIT License": "http://spdx.org/licenses/MIT"
250 "MIT License": "http://spdx.org/licenses/MIT"
251 },
251 },
252 "python2.7-pygments-markdown-lexer-0.1.0.dev39": {
252 "python2.7-pygments-markdown-lexer-0.1.0.dev39": {
253 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
253 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
254 },
254 },
255 "python2.7-pyparsing-1.5.7": {
255 "python2.7-pyparsing-1.5.7": {
256 "MIT License": "http://spdx.org/licenses/MIT"
256 "MIT License": "http://spdx.org/licenses/MIT"
257 },
257 },
258 "python2.7-pyramid-1.7.4": {
258 "python2.7-pyramid-1.7.4": {
259 "Repoze License": "http://www.repoze.org/LICENSE.txt"
259 "Repoze License": "http://www.repoze.org/LICENSE.txt"
260 },
260 },
261 "python2.7-pyramid-beaker-0.8": {
261 "python2.7-pyramid-beaker-0.8": {
262 "Repoze License": "http://www.repoze.org/LICENSE.txt"
262 "Repoze License": "http://www.repoze.org/LICENSE.txt"
263 },
263 },
264 "python2.7-pyramid-debugtoolbar-3.0.5": {
264 "python2.7-pyramid-debugtoolbar-3.0.5": {
265 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause",
265 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause",
266 "Repoze License": "http://www.repoze.org/LICENSE.txt"
266 "Repoze License": "http://www.repoze.org/LICENSE.txt"
267 },
267 },
268 "python2.7-pyramid-jinja2-2.5": {
268 "python2.7-pyramid-jinja2-2.5": {
269 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
269 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
270 },
270 },
271 "python2.7-pyramid-mako-1.0.2": {
271 "python2.7-pyramid-mako-1.0.2": {
272 "Repoze License": "http://www.repoze.org/LICENSE.txt"
272 "Repoze License": "http://www.repoze.org/LICENSE.txt"
273 },
273 },
274 "python2.7-pysqlite-2.6.3": {
274 "python2.7-pysqlite-2.6.3": {
275 "libpng License": "http://spdx.org/licenses/Libpng",
275 "libpng License": "http://spdx.org/licenses/Libpng",
276 "zlib License": "http://spdx.org/licenses/Zlib"
276 "zlib License": "http://spdx.org/licenses/Zlib"
277 },
277 },
278 "python2.7-pytest-3.0.5": {
278 "python2.7-pytest-3.0.5": {
279 "MIT License": "http://spdx.org/licenses/MIT"
279 "MIT License": "http://spdx.org/licenses/MIT"
280 },
280 },
281 "python2.7-pytest-profiling-1.2.2": {
281 "python2.7-pytest-profiling-1.2.2": {
282 "MIT License": "http://spdx.org/licenses/MIT"
282 "MIT License": "http://spdx.org/licenses/MIT"
283 },
283 },
284 "python2.7-pytest-runner-2.9": {
284 "python2.7-pytest-runner-2.9": {
285 "MIT License": "http://spdx.org/licenses/MIT"
285 "MIT License": "http://spdx.org/licenses/MIT"
286 },
286 },
287 "python2.7-pytest-sugar-0.7.1": {
287 "python2.7-pytest-sugar-0.7.1": {
288 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
288 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
289 },
289 },
290 "python2.7-pytest-timeout-1.2.0": {
290 "python2.7-pytest-timeout-1.2.0": {
291 "MIT License": "http://spdx.org/licenses/MIT"
291 "MIT License": "http://spdx.org/licenses/MIT"
292 },
292 },
293 "python2.7-python-dateutil-2.1": {
293 "python2.7-python-dateutil-2.1": {
294 "Simplified BSD": null
294 "Simplified BSD": null
295 },
295 },
296 "python2.7-python-editor-1.0.3": {
296 "python2.7-python-editor-1.0.3": {
297 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
297 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
298 },
298 },
299 "python2.7-python-ldap-2.4.19": {
299 "python2.7-python-ldap-2.4.19": {
300 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
300 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
301 },
301 },
302 "python2.7-python-memcached-1.57": {
302 "python2.7-python-memcached-1.57": {
303 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
303 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
304 },
304 },
305 "python2.7-pytz-2015.4": {
305 "python2.7-pytz-2015.4": {
306 "MIT License": "http://spdx.org/licenses/MIT"
306 "MIT License": "http://spdx.org/licenses/MIT"
307 },
307 },
308 "python2.7-pyzmq-14.6.0": {
308 "python2.7-pyzmq-14.6.0": {
309 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
309 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
310 },
310 },
311 "python2.7-recaptcha-client-1.0.6": {
311 "python2.7-recaptcha-client-1.0.6": {
312 "MIT License": "http://spdx.org/licenses/MIT"
312 "MIT License": "http://spdx.org/licenses/MIT"
313 },
313 },
314 "python2.7-repoze.lru-0.6": {
314 "python2.7-repoze.lru-0.6": {
315 "Repoze License": "http://www.repoze.org/LICENSE.txt"
315 "Repoze License": "http://www.repoze.org/LICENSE.txt"
316 },
316 },
317 "python2.7-requests-2.9.1": {
317 "python2.7-requests-2.9.1": {
318 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
318 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
319 },
319 },
320 "python2.7-setuptools-19.4": {
320 "python2.7-setuptools-19.4": {
321 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0",
321 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0",
322 "Zope Public License 2.0": "http://spdx.org/licenses/ZPL-2.0"
322 "Zope Public License 2.0": "http://spdx.org/licenses/ZPL-2.0"
323 },
323 },
324 "python2.7-setuptools-scm-1.15.6": {
324 "python2.7-setuptools-scm-1.15.6": {
325 "MIT License": "http://spdx.org/licenses/MIT"
325 "MIT License": "http://spdx.org/licenses/MIT"
326 },
326 },
327 "python2.7-simplegeneric-0.8.1": {
327 "python2.7-simplegeneric-0.8.1": {
328 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
328 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
329 },
329 },
330 "python2.7-simplejson-3.7.2": {
330 "python2.7-simplejson-3.7.2": {
331 "MIT License": "http://spdx.org/licenses/MIT"
331 "MIT License": "http://spdx.org/licenses/MIT"
332 },
332 },
333 "python2.7-six-1.9.0": {
333 "python2.7-six-1.9.0": {
334 "MIT License": "http://spdx.org/licenses/MIT"
334 "MIT License": "http://spdx.org/licenses/MIT"
335 },
335 },
336 "python2.7-subprocess32-3.2.6": {
336 "python2.7-subprocess32-3.2.6": {
337 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
337 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
338 },
338 },
339 "python2.7-termcolor-1.1.0": {
339 "python2.7-termcolor-1.1.0": {
340 "MIT License": "http://spdx.org/licenses/MIT"
340 "MIT License": "http://spdx.org/licenses/MIT"
341 },
341 },
342 "python2.7-testpath-0.1": {
342 "python2.7-testpath-0.1": {
343 "MIT License": "http://spdx.org/licenses/MIT"
343 "MIT License": "http://spdx.org/licenses/MIT"
344 },
344 },
345 "python2.7-traitlets-4.3.2": {
345 "python2.7-traitlets-4.3.2": {
346 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
346 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
347 },
347 },
348 "python2.7-translationstring-1.3": {
348 "python2.7-translationstring-1.3": {
349 "Repoze License": "http://www.repoze.org/LICENSE.txt"
349 "Repoze License": "http://www.repoze.org/LICENSE.txt"
350 },
350 },
351 "python2.7-urllib3-1.16": {
351 "python2.7-urllib3-1.16": {
352 "MIT License": "http://spdx.org/licenses/MIT"
352 "MIT License": "http://spdx.org/licenses/MIT"
353 },
353 },
354 "python2.7-venusian-1.0": {
354 "python2.7-venusian-1.0": {
355 "Repoze License": "http://www.repoze.org/LICENSE.txt"
355 "Repoze License": "http://www.repoze.org/LICENSE.txt"
356 },
356 },
357 "python2.7-waitress-1.0.1": {
357 "python2.7-waitress-1.0.1": {
358 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
358 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
359 },
359 },
360 "python2.7-wcwidth-0.1.7": {
360 "python2.7-wcwidth-0.1.7": {
361 "MIT License": "http://spdx.org/licenses/MIT"
361 "MIT License": "http://spdx.org/licenses/MIT"
362 },
362 },
363 "python2.7-ws4py-0.3.5": {
363 "python2.7-ws4py-0.3.5": {
364 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
364 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
365 },
365 },
366 "python2.7-zope.cachedescriptors-4.0.0": {
366 "python2.7-zope.cachedescriptors-4.0.0": {
367 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
367 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
368 },
368 },
369 "python2.7-zope.deprecation-4.1.2": {
369 "python2.7-zope.deprecation-4.1.2": {
370 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
370 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
371 },
371 },
372 "python2.7-zope.interface-4.1.3": {
372 "python2.7-zope.interface-4.1.3": {
373 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
373 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
374 },
374 },
375 "xz-5.2.2": {
375 "xz-5.2.2": {
376 "GNU General Public License v2.0 or later": "http://spdx.org/licenses/GPL-2.0+",
376 "GNU General Public License v2.0 or later": "http://spdx.org/licenses/GPL-2.0+",
377 "GNU Library General Public License v2.1 or later": "http://spdx.org/licenses/LGPL-2.1+"
377 "GNU Library General Public License v2.1 or later": "http://spdx.org/licenses/LGPL-2.1+"
378 }
378 }
379 }
\ No newline at end of file
379 }
@@ -1,431 +1,436 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import traceback
22 import traceback
23 import collections
23 import collections
24
24
25 from paste.gzipper import make_gzip_middleware
25 from paste.gzipper import make_gzip_middleware
26 from pyramid.wsgi import wsgiapp
26 from pyramid.wsgi import wsgiapp
27 from pyramid.authorization import ACLAuthorizationPolicy
27 from pyramid.authorization import ACLAuthorizationPolicy
28 from pyramid.config import Configurator
28 from pyramid.config import Configurator
29 from pyramid.settings import asbool, aslist
29 from pyramid.settings import asbool, aslist
30 from pyramid.httpexceptions import (
30 from pyramid.httpexceptions import (
31 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
31 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
32 from pyramid.events import ApplicationCreated
32 from pyramid.events import ApplicationCreated
33 from pyramid.renderers import render_to_response
33 from pyramid.renderers import render_to_response
34
34
35 from rhodecode.model import meta
35 from rhodecode.model import meta
36 from rhodecode.config import patches
36 from rhodecode.config import patches
37 from rhodecode.config import utils as config_utils
37 from rhodecode.config import utils as config_utils
38 from rhodecode.config.environment import load_pyramid_environment
38 from rhodecode.config.environment import load_pyramid_environment
39
39
40 from rhodecode.lib.middleware.vcs import VCSMiddleware
40 from rhodecode.lib.middleware.vcs import VCSMiddleware
41 from rhodecode.lib.vcs import VCSCommunicationError
41 from rhodecode.lib.vcs import VCSCommunicationError
42 from rhodecode.lib.exceptions import VCSServerUnavailable
42 from rhodecode.lib.exceptions import VCSServerUnavailable
43 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
43 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
44 from rhodecode.lib.middleware.https_fixup import HttpsFixup
44 from rhodecode.lib.middleware.https_fixup import HttpsFixup
45 from rhodecode.lib.celerylib.loader import configure_celery
45 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
46 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
46 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
47 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
47 from rhodecode.subscribers import (
48 from rhodecode.subscribers import (
48 scan_repositories_if_enabled, write_js_routes_if_enabled,
49 scan_repositories_if_enabled, write_js_routes_if_enabled,
49 write_metadata_if_needed, inject_app_settings)
50 write_metadata_if_needed, inject_app_settings)
50
51
51
52
52 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
53
54
54
55
55 def is_http_error(response):
56 def is_http_error(response):
56 # errors that should be logged with a traceback (5xx responses)
57 # errors that should be logged with a traceback (5xx responses)
57 return response.status_code > 499
58 return response.status_code > 499
58
59
59
60
60 def make_pyramid_app(global_config, **settings):
61 def make_pyramid_app(global_config, **settings):
61 """
62 """
62 Constructs the WSGI application based on Pyramid.
63 Constructs the WSGI application based on Pyramid.
63
64
64 Specials:
65 Specials:
65
66
66 * The application can also be integrated like a plugin via the call to
67 * The application can also be integrated like a plugin via the call to
67 `includeme`, together with the other utility functions that are called
68 `includeme`, together with the other utility functions that are called
68 here. Change this with great care so as not to break setups where these
69 here. Change this with great care so as not to break setups where these
69 fragments are assembled from another place.
70 fragments are assembled from another place.
70
71
71 """
72 """
72 sanitize_settings_and_apply_defaults(settings)
73 sanitize_settings_and_apply_defaults(settings)
73
74
74 config = Configurator(settings=settings)
75 config = Configurator(settings=settings)
75
76
76 # Apply compatibility patches
77 # Apply compatibility patches
77 patches.inspect_getargspec()
78 patches.inspect_getargspec()
78
79
79 load_pyramid_environment(global_config, settings)
80 load_pyramid_environment(global_config, settings)
80
81
81 # Static file view comes first
82 # Static file view comes first
82 includeme_first(config)
83 includeme_first(config)
83
84
84 includeme(config)
85 includeme(config)
85
86
86 pyramid_app = config.make_wsgi_app()
87 pyramid_app = config.make_wsgi_app()
87 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
88 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
88 pyramid_app.config = config
89 pyramid_app.config = config
89
90
91 config.configure_celery(global_config['__file__'])
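# (``global_config['__file__']`` is the path of the .ini file the app was started
# with, so the celery app configured here reads the same settings as the web
# process and as ``celery worker ... --ini=<file>``)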
90 # creating the app uses a connection - return it after we are done
92 # creating the app uses a connection - return it after we are done
91 meta.Session.remove()
93 meta.Session.remove()
92
94
95 log.info('Pyramid app %s created and configured.', pyramid_app)
93 return pyramid_app
96 return pyramid_app
94
97
95
98
96 def not_found_view(request):
99 def not_found_view(request):
97 """
100 """
98 This creates the view which should be registered as not-found-view to
101 This creates the view which should be registered as not-found-view to
99 pyramid.
102 pyramid.
100 """
103 """
101
104
102 if not getattr(request, 'vcs_call', None):
105 if not getattr(request, 'vcs_call', None):
103 # handle like regular case with our error_handler
106 # handle like regular case with our error_handler
104 return error_handler(HTTPNotFound(), request)
107 return error_handler(HTTPNotFound(), request)
105
108
106 # handle not found view as a vcs call
109 # handle not found view as a vcs call
107 settings = request.registry.settings
110 settings = request.registry.settings
108 ae_client = getattr(request, 'ae_client', None)
111 ae_client = getattr(request, 'ae_client', None)
109 vcs_app = VCSMiddleware(
112 vcs_app = VCSMiddleware(
110 HTTPNotFound(), request.registry, settings,
113 HTTPNotFound(), request.registry, settings,
111 appenlight_client=ae_client)
114 appenlight_client=ae_client)
112
115
113 return wsgiapp(vcs_app)(None, request)
116 return wsgiapp(vcs_app)(None, request)
114
117
115
118
116 def error_handler(exception, request):
119 def error_handler(exception, request):
117 import rhodecode
120 import rhodecode
118 from rhodecode.lib import helpers
121 from rhodecode.lib import helpers
119
122
120 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
123 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
121
124
122 base_response = HTTPInternalServerError()
125 base_response = HTTPInternalServerError()
123 # prefer original exception for the response since it may have headers set
126 # prefer original exception for the response since it may have headers set
124 if isinstance(exception, HTTPException):
127 if isinstance(exception, HTTPException):
125 base_response = exception
128 base_response = exception
126 elif isinstance(exception, VCSCommunicationError):
129 elif isinstance(exception, VCSCommunicationError):
127 base_response = VCSServerUnavailable()
130 base_response = VCSServerUnavailable()
128
131
129 if is_http_error(base_response):
132 if is_http_error(base_response):
130 log.exception(
133 log.exception(
131 'error occurred handling this request for path: %s', request.path)
134 'error occurred handling this request for path: %s', request.path)
132
135
133 error_explanation = base_response.explanation or str(base_response)
136 error_explanation = base_response.explanation or str(base_response)
134 if base_response.status_code == 404:
137 if base_response.status_code == 404:
135 error_explanation += " Or you don't have permission to access it."
138 error_explanation += " Or you don't have permission to access it."
136 c = AttributeDict()
139 c = AttributeDict()
137 c.error_message = base_response.status
140 c.error_message = base_response.status
138 c.error_explanation = error_explanation
141 c.error_explanation = error_explanation
139 c.visual = AttributeDict()
142 c.visual = AttributeDict()
140
143
141 c.visual.rhodecode_support_url = (
144 c.visual.rhodecode_support_url = (
142 request.registry.settings.get('rhodecode_support_url') or
145 request.registry.settings.get('rhodecode_support_url') or
143 request.route_url('rhodecode_support')
146 request.route_url('rhodecode_support')
144 )
147 )
145 c.redirect_time = 0
148 c.redirect_time = 0
146 c.rhodecode_name = rhodecode_title
149 c.rhodecode_name = rhodecode_title
147 if not c.rhodecode_name:
150 if not c.rhodecode_name:
148 c.rhodecode_name = 'Rhodecode'
151 c.rhodecode_name = 'Rhodecode'
149
152
150 c.causes = []
153 c.causes = []
151 if is_http_error(base_response):
154 if is_http_error(base_response):
152 c.causes.append('Server is overloaded.')
155 c.causes.append('Server is overloaded.')
153 c.causes.append('Server database connection is lost.')
156 c.causes.append('Server database connection is lost.')
154 c.causes.append('Server encountered an unexpected error.')
157 c.causes.append('Server encountered an unexpected error.')
155
158
156 if hasattr(base_response, 'causes'):
159 if hasattr(base_response, 'causes'):
157 c.causes = base_response.causes
160 c.causes = base_response.causes
158
161
159 c.messages = helpers.flash.pop_messages(request=request)
162 c.messages = helpers.flash.pop_messages(request=request)
160 c.traceback = traceback.format_exc()
163 c.traceback = traceback.format_exc()
161 response = render_to_response(
164 response = render_to_response(
162 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
165 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
163 response=base_response)
166 response=base_response)
164
167
165 return response
168 return response
166
169
167
170
 def includeme_first(config):
     # redirect automatic browser favicon.ico requests to correct place
     def favicon_redirect(context, request):
         return HTTPFound(
             request.static_path('rhodecode:public/images/favicon.ico'))

     config.add_view(favicon_redirect, route_name='favicon')
     config.add_route('favicon', '/favicon.ico')

     def robots_redirect(context, request):
         return HTTPFound(
             request.static_path('rhodecode:public/robots.txt'))

     config.add_view(robots_redirect, route_name='robots')
     config.add_route('robots', '/robots.txt')

     config.add_static_view(
         '_static/deform', 'deform:static')
     config.add_static_view(
         '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)


 def includeme(config):
     settings = config.registry.settings

     # plugin information
     config.registry.rhodecode_plugins = collections.OrderedDict()

     config.add_directive(
         'register_rhodecode_plugin', register_rhodecode_plugin)

+    config.add_directive('configure_celery', configure_celery)
+
     if asbool(settings.get('appenlight', 'false')):
         config.include('appenlight_client.ext.pyramid_tween')

     # Includes which are required. The application would fail without them.
     config.include('pyramid_mako')
     config.include('pyramid_beaker')

     config.include('rhodecode.authentication')
     config.include('rhodecode.integrations')

     # apps
     config.include('rhodecode.apps._base')
     config.include('rhodecode.apps.ops')

     config.include('rhodecode.apps.admin')
     config.include('rhodecode.apps.channelstream')
     config.include('rhodecode.apps.login')
     config.include('rhodecode.apps.home')
     config.include('rhodecode.apps.journal')
     config.include('rhodecode.apps.repository')
     config.include('rhodecode.apps.repo_group')
     config.include('rhodecode.apps.user_group')
     config.include('rhodecode.apps.search')
     config.include('rhodecode.apps.user_profile')
     config.include('rhodecode.apps.my_account')
     config.include('rhodecode.apps.svn_support')
     config.include('rhodecode.apps.ssh_support')
     config.include('rhodecode.apps.gist')

     config.include('rhodecode.apps.debug_style')
     config.include('rhodecode.tweens')
     config.include('rhodecode.api')

     config.add_route(
         'rhodecode_support', 'https://rhodecode.com/help/', static=True)

     config.add_translation_dirs('rhodecode:i18n/')
     settings['default_locale_name'] = settings.get('lang', 'en')

     # Add subscribers.
     config.add_subscriber(inject_app_settings, ApplicationCreated)
     config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
     config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
     config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)

     # events
     # TODO(marcink): this should be done when pyramid migration is finished
     # config.add_subscriber(
     #     'rhodecode.integrations.integrations_event_handler',
     #     'rhodecode.events.RhodecodeEvent')

     # request custom methods
     config.add_request_method(
         'rhodecode.lib.partial_renderer.get_partial_renderer',
         'get_partial_renderer')

     # Set the authorization policy.
     authz_policy = ACLAuthorizationPolicy()
     config.set_authorization_policy(authz_policy)

     # Set the default renderer for HTML templates to mako.
     config.add_mako_renderer('.html')

     config.add_renderer(
         name='json_ext',
         factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')

     # include RhodeCode plugins
     includes = aslist(settings.get('rhodecode.includes', []))
     for inc in includes:
         config.include(inc)

     # custom not found view, if our pyramid app doesn't know how to handle
     # the request pass it to potential VCS handling ap
     config.add_notfound_view(not_found_view)
     if not settings.get('debugtoolbar.enabled', False):
         # disabled debugtoolbar handle all exceptions via the error_handlers
         config.add_view(error_handler, context=Exception)

     # all errors including 403/404/50X
     config.add_view(error_handler, context=HTTPError)


 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
     """
     Apply outer WSGI middlewares around the application.
     """
     settings = config.registry.settings

     # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
     pyramid_app = HttpsFixup(pyramid_app, settings)

     pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
         pyramid_app, settings)
     config.registry.ae_client = _ae_client

     if settings['gzip_responses']:
         pyramid_app = make_gzip_middleware(
             pyramid_app, settings, compress_level=1)

     # this should be the outer most middleware in the wsgi stack since
     # middleware like Routes make database calls
     def pyramid_app_with_cleanup(environ, start_response):
         try:
             return pyramid_app(environ, start_response)
         finally:
             # Dispose current database session and rollback uncommitted
             # transactions.
             meta.Session.remove()

             # In a single threaded mode server, on non sqlite db we should have
             # '0 Current Checked out connections' at the end of a request,
             # if not, then something, somewhere is leaving a connection open
             pool = meta.Base.metadata.bind.engine.pool
             log.debug('sa pool status: %s', pool.status())

     return pyramid_app_with_cleanup


 def sanitize_settings_and_apply_defaults(settings):
     """
     Applies settings defaults and does all type conversion.

     We would move all settings parsing and preparation into this place, so that
     we have only one place left which deals with this part. The remaining parts
     of the application would start to rely fully on well prepared settings.

     This piece would later be split up per topic to avoid a big fat monster
     function.
     """

     settings.setdefault('rhodecode.edition', 'Community Edition')

     if 'mako.default_filters' not in settings:
         # set custom default filters if we don't have it defined
         settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
         settings['mako.default_filters'] = 'h_filter'

     if 'mako.directories' not in settings:
         mako_directories = settings.setdefault('mako.directories', [
             # Base templates of the original application
             'rhodecode:templates',
         ])
         log.debug(
             "Using the following Mako template directories: %s",
             mako_directories)

     # Default includes, possible to change as a user
     pyramid_includes = settings.setdefault('pyramid.includes', [
         'rhodecode.lib.middleware.request_wrapper',
     ])
     log.debug(
         "Using the following pyramid.includes: %s",
         pyramid_includes)

     # TODO: johbo: Re-think this, usually the call to config.include
     # should allow to pass in a prefix.
     settings.setdefault('rhodecode.api.url', '/_admin/api')

     # Sanitize generic settings.
     _list_setting(settings, 'default_encoding', 'UTF-8')
     _bool_setting(settings, 'is_test', 'false')
     _bool_setting(settings, 'gzip_responses', 'false')

     # Call split out functions that sanitize settings for each topic.
     _sanitize_appenlight_settings(settings)
     _sanitize_vcs_settings(settings)

     # configure instance id
     config_utils.set_instance_id(settings)

     return settings


 def _sanitize_appenlight_settings(settings):
     _bool_setting(settings, 'appenlight', 'false')


 def _sanitize_vcs_settings(settings):
     """
     Applies settings defaults and does type conversion for all VCS related
     settings.
     """
     _string_setting(settings, 'vcs.svn.compatible_version', '')
     _string_setting(settings, 'git_rev_filter', '--all')
     _string_setting(settings, 'vcs.hooks.protocol', 'http')
     _string_setting(settings, 'vcs.scm_app_implementation', 'http')
     _string_setting(settings, 'vcs.server', '')
     _string_setting(settings, 'vcs.server.log_level', 'debug')
     _string_setting(settings, 'vcs.server.protocol', 'http')
     _bool_setting(settings, 'startup.import_repos', 'false')
     _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
     _bool_setting(settings, 'vcs.server.enable', 'true')
     _bool_setting(settings, 'vcs.start_server', 'false')
     _list_setting(settings, 'vcs.backends', 'hg, git, svn')
     _int_setting(settings, 'vcs.connection_timeout', 3600)

     # Support legacy values of vcs.scm_app_implementation. Legacy
     # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
     # which is now mapped to 'http'.
     scm_app_impl = settings['vcs.scm_app_implementation']
     if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
         settings['vcs.scm_app_implementation'] = 'http'


 def _int_setting(settings, name, default):
     settings[name] = int(settings.get(name, default))


 def _bool_setting(settings, name, default):
     input_val = settings.get(name, default)
     if isinstance(input_val, unicode):
         input_val = input_val.encode('utf8')
     settings[name] = asbool(input_val)


 def _list_setting(settings, name, default):
     raw_value = settings.get(name, default)

     old_separator = ','
     if old_separator in raw_value:
         # If we get a comma separated list, pass it to our own function.
         settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
     else:
         # Otherwise we assume it uses pyramids space/newline separation.
         settings[name] = aslist(raw_value)


 def _string_setting(settings, name, default, lower=True):
     value = settings.get(name, default)
     if lower:
         value = value.lower()
     settings[name] = value
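
Note on the settings helpers above: `_bool_setting`, `_int_setting`, `_list_setting` and `_string_setting` coerce raw `.ini` strings into typed values once, at startup, so the rest of the application can rely on well prepared settings. Below is a minimal standalone sketch of that idea; it assumes only `pyramid.settings.asbool` / `aslist` and the illustrative key names, and is not RhodeCode's actual API.

from pyramid.settings import asbool, aslist

def sanitize(settings):
    # booleans arrive from the .ini file as strings such as 'true' / 'false'
    settings['vcs.server.enable'] = asbool(settings.get('vcs.server.enable', 'true'))
    # lists may be comma separated ('hg, git, svn') or space/newline separated
    raw = settings.get('vcs.backends', 'hg, git, svn')
    settings['vcs.backends'] = (
        [item.strip() for item in raw.split(',')] if ',' in raw else aslist(raw))
    # integers are plain int() conversions with a default
    settings['vcs.connection_timeout'] = int(settings.get('vcs.connection_timeout', 3600))
    return settings

print(sanitize({'vcs.server.enable': 'false', 'vcs.backends': 'hg, git'}))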
@@ -1,254 +1,252 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2012-2017 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 from __future__ import unicode_literals
 import deform
-import re
 import logging
 import requests
 import colander
 import textwrap
-from celery.task import task
 from mako.template import Template

 from rhodecode import events
 from rhodecode.translation import _
 from rhodecode.lib import helpers as h
-from rhodecode.lib.celerylib import run_task
+from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask
 from rhodecode.lib.colander_utils import strip_whitespace
 from rhodecode.integrations.types.base import IntegrationTypeBase

 log = logging.getLogger(__name__)


 class HipchatSettingsSchema(colander.Schema):
     color_choices = [
         ('yellow', _('Yellow')),
         ('red', _('Red')),
         ('green', _('Green')),
         ('purple', _('Purple')),
         ('gray', _('Gray')),
     ]

     server_url = colander.SchemaNode(
         colander.String(),
         title=_('Hipchat server URL'),
         description=_('Hipchat integration url.'),
         default='',
         preparer=strip_whitespace,
         validator=colander.url,
         widget=deform.widget.TextInputWidget(
             placeholder='https://?.hipchat.com/v2/room/?/notification?auth_token=?',
         ),
     )
     notify = colander.SchemaNode(
         colander.Bool(),
         title=_('Notify'),
         description=_('Make a notification to the users in room.'),
         missing=False,
         default=False,
     )
     color = colander.SchemaNode(
         colander.String(),
         title=_('Color'),
         description=_('Background color of message.'),
         missing='',
         validator=colander.OneOf([x[0] for x in color_choices]),
         widget=deform.widget.Select2Widget(
             values=color_choices,
         ),
     )


 repo_push_template = Template('''
 <b>${data['actor']['username']}</b> pushed to repo <a href="${data['repo']['url']}">${data['repo']['repo_name']}</a>:
 <br>
 <ul>
 %for branch, branch_commits in branches_commits.items():
 <li>
 <a href="${branch_commits['branch']['url']}">branch: ${branch_commits['branch']['name']}</a>
 <ul>
 %for commit in branch_commits['commits']:
 <li><a href="${commit['url']}">${commit['short_id']}</a> - ${commit['message_html']}</li>
 %endfor
 </ul>
 </li>
 %endfor
 ''')


 class HipchatIntegrationType(IntegrationTypeBase):
     key = 'hipchat'
     display_name = _('Hipchat')
     description = _('Send events such as repo pushes and pull requests to '
                     'your hipchat channel.')
102 icon = '''<?xml version="1.0" encoding="utf-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve"><g><g transform="translate(0.000000,511.000000) scale(0.100000,-0.100000)"><path fill="#205281" d="M4197.1,4662.4c-1661.5-260.4-3018-1171.6-3682.6-2473.3C219.9,1613.6,100,1120.3,100,462.6c0-1014,376.8-1918.4,1127-2699.4C2326.7-3377.6,3878.5-3898.3,5701-3730.5l486.5,44.5l208.9-123.3c637.2-373.4,1551.8-640.6,2240.4-650.9c304.9-6.9,335.7,0,417.9,75.4c185,174.7,147.3,411.1-89.1,548.1c-315.2,181.6-620,544.7-733.1,870.1l-51.4,157.6l472.7,472.7c349.4,349.4,520.7,551.5,657.7,774.2c784.5,1281.2,784.5,2788.5,0,4052.6c-236.4,376.8-794.8,966-1178.4,1236.7c-572.1,407.7-1264.1,709.1-1993.7,870.1c-267.2,58.2-479.6,75.4-1038,82.2C4714.4,4686.4,4310.2,4679.6,4197.1,4662.4z M5947.6,3740.9c1856.7-380.3,3127.6-1709.4,3127.6-3275c0-1000.3-534.4-1949.2-1466.2-2600.1c-188.4-133.6-287.8-226.1-301.5-284.4c-41.1-157.6,263.8-938.6,397.4-1020.8c20.5-10.3,34.3-44.5,34.3-75.4c0-167.8-811.9,195.3-1363.4,609.8l-181.6,137l-332.3-58.2c-445.3-78.8-1281.2-78.8-1702.6,0C2796-2569.2,1734.1-1832.6,1220.2-801.5C983.8-318.5,905,51.5,929,613.3c27.4,640.6,243.2,1192.1,685.1,1740.3c620,770.8,1661.5,1305.2,2822.8,1452.5C4806.9,3854,5553.7,3819.7,5947.6,3740.9z"/><path fill="#205281" d="M2381.5-345.9c-75.4-106.2-68.5-167.8,34.3-322c332.3-500.2,1010.6-928.4,1760.8-1120.2c417.9-106.2,1226.4-106.2,1644.3,0c712.5,181.6,1270.9,517.3,1685.4,1014C7681-561.7,7715.3-424.7,7616-325.4c-89.1,89.1-167.9,65.1-431.7-133.6c-835.8-630.3-2028-856.4-3086.5-585.8C3683.3-938.6,3142-685,2830.3-448.7C2576.8-253.4,2463.7-229.4,2381.5-345.9z"/></g></g><!-- Svg Vector Icons : http://www.onlinewebfonts.com/icon --></svg>'''
     valid_events = [
         events.PullRequestCloseEvent,
         events.PullRequestMergeEvent,
         events.PullRequestUpdateEvent,
         events.PullRequestCommentEvent,
         events.PullRequestReviewEvent,
         events.PullRequestCreateEvent,
         events.RepoPushEvent,
         events.RepoCreateEvent,
     ]

     def send_event(self, event):
         if event.__class__ not in self.valid_events:
             log.debug('event not valid: %r' % event)
             return

         if event.name not in self.settings['events']:
             log.debug('event ignored: %r' % event)
             return

         data = event.as_dict()

         text = '<b>%s<b> caused a <b>%s</b> event' % (
             data['actor']['username'], event.name)

         log.debug('handling hipchat event for %s' % event.name)

         if isinstance(event, events.PullRequestCommentEvent):
             text = self.format_pull_request_comment_event(event, data)
         elif isinstance(event, events.PullRequestReviewEvent):
             text = self.format_pull_request_review_event(event, data)
         elif isinstance(event, events.PullRequestEvent):
             text = self.format_pull_request_event(event, data)
         elif isinstance(event, events.RepoPushEvent):
             text = self.format_repo_push_event(data)
         elif isinstance(event, events.RepoCreateEvent):
             text = self.format_repo_create_event(data)
         else:
             log.error('unhandled event type: %r' % event)

         run_task(post_text_to_hipchat, self.settings, text)

     def settings_schema(self):
         schema = HipchatSettingsSchema()
         schema.add(colander.SchemaNode(
             colander.Set(),
             widget=deform.widget.CheckboxChoiceWidget(
                 values=sorted(
                     [(e.name, e.display_name) for e in self.valid_events]
                 )
             ),
             description="Events activated for this integration",
             name='events'
         ))

         return schema

     def format_pull_request_comment_event(self, event, data):
         comment_text = data['comment']['text']
         if len(comment_text) > 200:
             comment_text = '{comment_text}<a href="{comment_url}">...<a/>'.format(
                 comment_text=h.html_escape(comment_text[:200]),
                 comment_url=data['comment']['url'],
             )

         comment_status = ''
         if data['comment']['status']:
             comment_status = '[{}]: '.format(data['comment']['status'])

         return (textwrap.dedent(
             '''
             {user} commented on pull request <a href="{pr_url}">{number}</a> - {pr_title}:
             >>> {comment_status}{comment_text}
             ''').format(
                 comment_status=comment_status,
                 user=data['actor']['username'],
                 number=data['pullrequest']['pull_request_id'],
                 pr_url=data['pullrequest']['url'],
                 pr_status=data['pullrequest']['status'],
                 pr_title=h.html_escape(data['pullrequest']['title']),
                 comment_text=h.html_escape(comment_text)
             )
         )

     def format_pull_request_review_event(self, event, data):
         return (textwrap.dedent(
             '''
             Status changed to {pr_status} for pull request <a href="{pr_url}">#{number}</a> - {pr_title}
             ''').format(
                 user=data['actor']['username'],
                 number=data['pullrequest']['pull_request_id'],
                 pr_url=data['pullrequest']['url'],
                 pr_status=data['pullrequest']['status'],
                 pr_title=h.html_escape(data['pullrequest']['title']),
             )
         )

     def format_pull_request_event(self, event, data):
         action = {
             events.PullRequestCloseEvent: 'closed',
             events.PullRequestMergeEvent: 'merged',
             events.PullRequestUpdateEvent: 'updated',
             events.PullRequestCreateEvent: 'created',
         }.get(event.__class__, str(event.__class__))

         return ('Pull request <a href="{url}">#{number}</a> - {title} '
                 '{action} by <b>{user}</b>').format(
             user=data['actor']['username'],
             number=data['pullrequest']['pull_request_id'],
             url=data['pullrequest']['url'],
             title=h.html_escape(data['pullrequest']['title']),
             action=action
         )

     def format_repo_push_event(self, data):
         branch_data = {branch['name']: branch
                        for branch in data['push']['branches']}

         branches_commits = {}
         for commit in data['push']['commits']:
             if commit['branch'] not in branches_commits:
                 branch_commits = {'branch': branch_data[commit['branch']],
                                   'commits': []}
                 branches_commits[commit['branch']] = branch_commits

             branch_commits = branches_commits[commit['branch']]
             branch_commits['commits'].append(commit)

         result = repo_push_template.render(
             data=data,
             branches_commits=branches_commits,
         )
         return result

     def format_repo_create_event(self, data):
         return '<a href="{}">{}</a> ({}) repository created by <b>{}</b>'.format(
             data['repo']['url'],
             h.html_escape(data['repo']['repo_name']),
             data['repo']['repo_type'],
             data['actor']['username'],
         )


-@task(ignore_result=True)
+@async_task(ignore_result=True, base=RequestContextTask)
 def post_text_to_hipchat(settings, text):
     log.debug('sending %s to hipchat %s' % (text, settings['server_url']))
     resp = requests.post(settings['server_url'], json={
         "message": text,
         "color": settings.get('color', 'yellow'),
         "notify": settings.get('notify', False),
     })
     resp.raise_for_status()  # raise exception on a failed request
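
The change above swaps the old module-level `celery.task.task` decorator for the loader-based `async_task` decorator with the `RequestContextTask` base, i.e. tasks are now registered against an explicit Celery app, which is the app-bound style this Celery 4.x migration moves to. A minimal sketch of the same pattern on plain Celery 4.x follows; the broker URL, the `ContextTask` name and the task body are illustrative placeholders, not part of RhodeCode.

from celery import Celery, Task

app = Celery('tasks', broker='redis://localhost:6379/0')  # assumed broker URL

class ContextTask(Task):
    # a custom base class is where per-request context could be attached
    def __call__(self, *args, **kwargs):
        return super(ContextTask, self).__call__(*args, **kwargs)

@app.task(ignore_result=True, base=ContextTask)
def post_text(url, text):
    # stand-in for the real HTTP notification call
    print('would POST %r to %s' % (text, url))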
@@ -1,334 +1,333 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2012-2017 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 from __future__ import unicode_literals
 import re
 import time
 import textwrap
 import logging

 import deform
 import requests
 import colander
-from celery.task import task
 from mako.template import Template

 from rhodecode import events
 from rhodecode.translation import _
 from rhodecode.lib import helpers as h
-from rhodecode.lib.celerylib import run_task
+from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask
 from rhodecode.lib.colander_utils import strip_whitespace
 from rhodecode.integrations.types.base import IntegrationTypeBase

 log = logging.getLogger(__name__)


 class SlackSettingsSchema(colander.Schema):
     service = colander.SchemaNode(
         colander.String(),
         title=_('Slack service URL'),
         description=h.literal(_(
             'This can be setup at the '
             '<a href="https://my.slack.com/services/new/incoming-webhook/">'
             'slack app manager</a>')),
         default='',
         preparer=strip_whitespace,
         validator=colander.url,
         widget=deform.widget.TextInputWidget(
             placeholder='https://hooks.slack.com/services/...',
         ),
     )
     username = colander.SchemaNode(
         colander.String(),
         title=_('Username'),
         description=_('Username to show notifications coming from.'),
         missing='Rhodecode',
         preparer=strip_whitespace,
         widget=deform.widget.TextInputWidget(
             placeholder='Rhodecode'
         ),
     )
     channel = colander.SchemaNode(
         colander.String(),
         title=_('Channel'),
         description=_('Channel to send notifications to.'),
         missing='',
         preparer=strip_whitespace,
         widget=deform.widget.TextInputWidget(
             placeholder='#general'
         ),
     )
     icon_emoji = colander.SchemaNode(
         colander.String(),
         title=_('Emoji'),
         description=_('Emoji to use eg. :studio_microphone:'),
         missing='',
         preparer=strip_whitespace,
         widget=deform.widget.TextInputWidget(
             placeholder=':studio_microphone:'
         ),
     )


 class SlackIntegrationType(IntegrationTypeBase):
     key = 'slack'
     display_name = _('Slack')
     description = _('Send events such as repo pushes and pull requests to '
                     'your slack channel.')
95 icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M165.963541,15.8384262 C162.07318,3.86308197 149.212328,-2.69009836 137.239082,1.20236066 C125.263738,5.09272131 118.710557,17.9535738 122.603016,29.9268197 L181.550164,211.292328 C185.597902,222.478689 197.682361,228.765377 209.282098,225.426885 C221.381246,221.943607 228.756984,209.093246 224.896,197.21023 C224.749115,196.756984 165.963541,15.8384262 165.963541,15.8384262" fill="#DFA22F"></path><path d="M74.6260984,45.515541 C70.7336393,33.5422951 57.8727869,26.9891148 45.899541,30.8794754 C33.9241967,34.7698361 27.3710164,47.6306885 31.2634754,59.6060328 L90.210623,240.971541 C94.2583607,252.157902 106.34282,258.44459 117.942557,255.104 C130.041705,251.62282 137.417443,238.772459 133.556459,226.887344 C133.409574,226.436197 74.6260984,45.515541 74.6260984,45.515541" fill="#3CB187"></path><path d="M240.161574,166.045377 C252.136918,162.155016 258.688,149.294164 254.797639,137.31882 C250.907279,125.345574 238.046426,118.792393 226.07318,122.682754 L44.7076721,181.632 C33.5213115,185.677639 27.234623,197.762098 30.5731148,209.361836 C34.0563934,221.460984 46.9067541,228.836721 58.7897705,224.975738 C59.2430164,224.828852 240.161574,166.045377 240.161574,166.045377" fill="#CE1E5B"></path><path d="M82.507541,217.270557 C94.312918,213.434754 109.528131,208.491016 125.855475,203.186361 C122.019672,191.380984 117.075934,176.163672 111.76918,159.83423 L68.4191475,173.924721 L82.507541,217.270557" fill="#392538"></path><path d="M173.847082,187.591344 C190.235279,182.267803 205.467279,177.31777 217.195016,173.507148 C213.359213,161.70177 208.413377,146.480262 203.106623,130.146623 L159.75659,144.237115 L173.847082,187.591344" fill="#BB242A"></path><path d="M210.484459,74.7058361 C222.457705,70.8154754 229.010885,57.954623 225.120525,45.9792787 C221.230164,34.0060328 208.369311,27.4528525 196.393967,31.3432131 L15.028459,90.292459 C3.84209836,94.3380984 -2.44459016,106.422557 0.896,118.022295 C4.37718033,130.121443 17.227541,137.49718 29.1126557,133.636197 C29.5638033,133.489311 210.484459,74.7058361 210.484459,74.7058361" fill="#72C5CD"></path><path d="M52.8220328,125.933115 C64.6274098,122.097311 79.8468197,117.151475 96.1762623,111.84682 C90.8527213,95.4565246 85.9026885,80.2245246 82.0920656,68.4946885 L38.731541,82.5872787 L52.8220328,125.933115" fill="#248C73"></path><path d="M144.159475,96.256 C160.551869,90.9303607 175.785967,85.9803279 187.515803,82.1676066 C182.190164,65.7752131 177.240131,50.5390164 173.42741,38.807082 L130.068984,52.8996721 L144.159475,96.256" fill="#62803A"></path></g></svg>'''
     valid_events = [
         events.PullRequestCloseEvent,
         events.PullRequestMergeEvent,
         events.PullRequestUpdateEvent,
         events.PullRequestCommentEvent,
         events.PullRequestReviewEvent,
         events.PullRequestCreateEvent,
         events.RepoPushEvent,
         events.RepoCreateEvent,
     ]

     def send_event(self, event):
         if event.__class__ not in self.valid_events:
             log.debug('event not valid: %r' % event)
             return

         if event.name not in self.settings['events']:
             log.debug('event ignored: %r' % event)
             return

         data = event.as_dict()

         # defaults
         title = '*%s* caused a *%s* event' % (
             data['actor']['username'], event.name)
         text = '*%s* caused a *%s* event' % (
             data['actor']['username'], event.name)
         fields = None
         overrides = None

         log.debug('handling slack event for %s' % event.name)

         if isinstance(event, events.PullRequestCommentEvent):
             (title, text, fields, overrides) \
                 = self.format_pull_request_comment_event(event, data)
         elif isinstance(event, events.PullRequestReviewEvent):
             title, text = self.format_pull_request_review_event(event, data)
         elif isinstance(event, events.PullRequestEvent):
             title, text = self.format_pull_request_event(event, data)
         elif isinstance(event, events.RepoPushEvent):
             title, text = self.format_repo_push_event(data)
         elif isinstance(event, events.RepoCreateEvent):
             title, text = self.format_repo_create_event(data)
         else:
             log.error('unhandled event type: %r' % event)

         run_task(post_text_to_slack, self.settings, title, text, fields, overrides)

     def settings_schema(self):
         schema = SlackSettingsSchema()
         schema.add(colander.SchemaNode(
             colander.Set(),
             widget=deform.widget.CheckboxChoiceWidget(
                 values=sorted(
                     [(e.name, e.display_name) for e in self.valid_events]
                 )
             ),
             description="Events activated for this integration",
             name='events'
         ))

         return schema

     def format_pull_request_comment_event(self, event, data):
         comment_text = data['comment']['text']
         if len(comment_text) > 200:
             comment_text = '<{comment_url}|{comment_text}...>'.format(
                 comment_text=comment_text[:200],
                 comment_url=data['comment']['url'],
             )

         fields = None
         overrides = None
         status_text = None

         if data['comment']['status']:
             status_color = {
                 'approved': '#0ac878',
                 'rejected': '#e85e4d'}.get(data['comment']['status'])

             if status_color:
                 overrides = {"color": status_color}

             status_text = data['comment']['status']

         if data['comment']['file']:
             fields = [
                 {
                     "title": "file",
                     "value": data['comment']['file']
                 },
                 {
                     "title": "line",
                     "value": data['comment']['line']
                 }
             ]

         title = Template(textwrap.dedent(r'''
             *${data['actor']['username']}* left ${data['comment']['type']} on pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>:
         ''')).render(data=data, comment=event.comment)

         text = Template(textwrap.dedent(r'''
             *pull request title*: ${pr_title}
             % if status_text:
             *submitted status*: `${status_text}`
             % endif
             >>> ${comment_text}
         ''')).render(comment_text=comment_text,
                      pr_title=data['pullrequest']['title'],
                      status_text=status_text)

         return title, text, fields, overrides

     def format_pull_request_review_event(self, event, data):
         title = Template(textwrap.dedent(r'''
             *${data['actor']['username']}* changed status of pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']} to `${data['pullrequest']['status']}`>:
         ''')).render(data=data)

         text = Template(textwrap.dedent(r'''
             *pull request title*: ${pr_title}
         ''')).render(
             pr_title=data['pullrequest']['title'],
         )

         return title, text

     def format_pull_request_event(self, event, data):
         action = {
             events.PullRequestCloseEvent: 'closed',
             events.PullRequestMergeEvent: 'merged',
             events.PullRequestUpdateEvent: 'updated',
             events.PullRequestCreateEvent: 'created',
         }.get(event.__class__, str(event.__class__))

         title = Template(textwrap.dedent(r'''
             *${data['actor']['username']}* `${action}` pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>:
         ''')).render(data=data, action=action)

         text = Template(textwrap.dedent(r'''
235 *pull request title*: ${pr_title}
234 *pull request title*: ${pr_title}
236 %if data['pullrequest']['commits']:
235 %if data['pullrequest']['commits']:
237 *commits*: ${len(data['pullrequest']['commits'])}
236 *commits*: ${len(data['pullrequest']['commits'])}
238 %endif
237 %endif
239 ''')).render(
238 ''')).render(
240 pr_title=data['pullrequest']['title'],
239 pr_title=data['pullrequest']['title'],
241 data=data
240 data=data
242 )
241 )
243
242
244 return title, text
243 return title, text
245
244
246 def format_repo_push_event(self, data):
245 def format_repo_push_event(self, data):
247 branch_data = {branch['name']: branch
246 branch_data = {branch['name']: branch
248 for branch in data['push']['branches']}
247 for branch in data['push']['branches']}
249
248
250 branches_commits = {}
249 branches_commits = {}
251 for commit in data['push']['commits']:
250 for commit in data['push']['commits']:
252 if commit['branch'] not in branches_commits:
251 if commit['branch'] not in branches_commits:
253 branch_commits = {'branch': branch_data[commit['branch']],
252 branch_commits = {'branch': branch_data[commit['branch']],
254 'commits': []}
253 'commits': []}
255 branches_commits[commit['branch']] = branch_commits
254 branches_commits[commit['branch']] = branch_commits
256
255
257 branch_commits = branches_commits[commit['branch']]
256 branch_commits = branches_commits[commit['branch']]
258 branch_commits['commits'].append(commit)
257 branch_commits['commits'].append(commit)
259
258
260 title = Template(r'''
259 title = Template(r'''
261 *${data['actor']['username']}* pushed to repo <${data['repo']['url']}|${data['repo']['repo_name']}>:
260 *${data['actor']['username']}* pushed to repo <${data['repo']['url']}|${data['repo']['repo_name']}>:
262 ''').render(data=data)
261 ''').render(data=data)
263
262
264 repo_push_template = Template(textwrap.dedent(r'''
263 repo_push_template = Template(textwrap.dedent(r'''
265 %for branch, branch_commits in branches_commits.items():
264 %for branch, branch_commits in branches_commits.items():
266 ${len(branch_commits['commits'])} ${'commit' if len(branch_commits['commits']) == 1 else 'commits'} on branch: <${branch_commits['branch']['url']}|${branch_commits['branch']['name']}>
265 ${len(branch_commits['commits'])} ${'commit' if len(branch_commits['commits']) == 1 else 'commits'} on branch: <${branch_commits['branch']['url']}|${branch_commits['branch']['name']}>
267 %for commit in branch_commits['commits']:
266 %for commit in branch_commits['commits']:
268 `<${commit['url']}|${commit['short_id']}>` - ${commit['message_html']|html_to_slack_links}
267 `<${commit['url']}|${commit['short_id']}>` - ${commit['message_html']|html_to_slack_links}
269 %endfor
268 %endfor
270 %endfor
269 %endfor
271 '''))
270 '''))
272
271
273 text = repo_push_template.render(
272 text = repo_push_template.render(
274 data=data,
273 data=data,
275 branches_commits=branches_commits,
274 branches_commits=branches_commits,
276 html_to_slack_links=html_to_slack_links,
275 html_to_slack_links=html_to_slack_links,
277 )
276 )
278
277
279 return title, text
278 return title, text
280
279
281 def format_repo_create_event(self, data):
280 def format_repo_create_event(self, data):
282 title = Template(r'''
281 title = Template(r'''
283 *${data['actor']['username']}* created new repository ${data['repo']['repo_name']}:
282 *${data['actor']['username']}* created new repository ${data['repo']['repo_name']}:
284 ''').render(data=data)
283 ''').render(data=data)
285
284
286 text = Template(textwrap.dedent(r'''
285 text = Template(textwrap.dedent(r'''
287 repo_url: ${data['repo']['url']}
286 repo_url: ${data['repo']['url']}
288 repo_type: ${data['repo']['repo_type']}
287 repo_type: ${data['repo']['repo_type']}
289 ''')).render(data=data)
288 ''')).render(data=data)
290
289
291 return title, text
290 return title, text
292
291
293
292
294 def html_to_slack_links(message):
293 def html_to_slack_links(message):
295 return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub(
294 return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub(
296 r'<\1|\2>', message)
295 r'<\1|\2>', message)
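For illustration only (not part of the changeset), the regular expression above rewrites anchors produced by the commit-message renderer into Slack's <url|label> link markup; the URL below is hypothetical::

    import re

    def html_to_slack_links(message):
        # turn <a href="...">label</a> into Slack's <url|label> form
        return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub(
            r'<\1|\2>', message)

    html = 'Fixes <a class="issue" href="https://code.example.invalid/issues/42">#42</a>'
    print(html_to_slack_links(html))
    # Fixes <https://code.example.invalid/issues/42|#42>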
297
296
298
297
299 @task(ignore_result=True)
298 @async_task(ignore_result=True, base=RequestContextTask)
300 def post_text_to_slack(settings, title, text, fields=None, overrides=None):
299 def post_text_to_slack(settings, title, text, fields=None, overrides=None):
301 log.debug('sending %s (%s) to slack %s' % (
300 log.debug('sending %s (%s) to slack %s' % (
302 title, text, settings['service']))
301 title, text, settings['service']))
303
302
304 fields = fields or []
303 fields = fields or []
305 overrides = overrides or {}
304 overrides = overrides or {}
306
305
307 message_data = {
306 message_data = {
308 "fallback": text,
307 "fallback": text,
309 "color": "#427cc9",
308 "color": "#427cc9",
310 "pretext": title,
309 "pretext": title,
311 #"author_name": "Bobby Tables",
310 #"author_name": "Bobby Tables",
312 #"author_link": "http://flickr.com/bobby/",
311 #"author_link": "http://flickr.com/bobby/",
313 #"author_icon": "http://flickr.com/icons/bobby.jpg",
312 #"author_icon": "http://flickr.com/icons/bobby.jpg",
314 #"title": "Slack API Documentation",
313 #"title": "Slack API Documentation",
315 #"title_link": "https://api.slack.com/",
314 #"title_link": "https://api.slack.com/",
316 "text": text,
315 "text": text,
317 "fields": fields,
316 "fields": fields,
318 #"image_url": "http://my-website.com/path/to/image.jpg",
317 #"image_url": "http://my-website.com/path/to/image.jpg",
319 #"thumb_url": "http://example.com/path/to/thumb.png",
318 #"thumb_url": "http://example.com/path/to/thumb.png",
320 "footer": "RhodeCode",
319 "footer": "RhodeCode",
321 #"footer_icon": "",
320 #"footer_icon": "",
322 "ts": time.time(),
321 "ts": time.time(),
323 "mrkdwn_in": ["pretext", "text"]
322 "mrkdwn_in": ["pretext", "text"]
324 }
323 }
325 message_data.update(overrides)
324 message_data.update(overrides)
326 json_message = {
325 json_message = {
327 "icon_emoji": settings.get('icon_emoji', ':studio_microphone:'),
326 "icon_emoji": settings.get('icon_emoji', ':studio_microphone:'),
328 "channel": settings.get('channel', ''),
327 "channel": settings.get('channel', ''),
329 "username": settings.get('username', 'Rhodecode'),
328 "username": settings.get('username', 'Rhodecode'),
330 "attachments": [message_data]
329 "attachments": [message_data]
331 }
330 }
332
331
333 resp = requests.post(settings['service'], json=json_message)
332 resp = requests.post(settings['service'], json=json_message)
334 resp.raise_for_status() # raise exception on a failed request
333 resp.raise_for_status() # raise exception on a failed request
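A minimal sketch (assuming the imports shown in this changeset) of how a notification task is declared and dispatched after the celery 4.x switch: the old `celery.task.task` decorator is replaced by `async_task`, and `run_task` either queues the call or falls back to running it synchronously::

    from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask

    @async_task(ignore_result=True, base=RequestContextTask)
    def post_text_to_slack(settings, title, text, fields=None, overrides=None):
        ...  # body as above: build the attachment and POST it to settings['service']

    # inside the web request (e.g. from the integration's send_event):
    run_task(post_text_to_slack, settings, title, text, fields, overrides)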
@@ -1,350 +1,351 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2017 RhodeCode GmbH
3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from __future__ import unicode_literals
21 from __future__ import unicode_literals
22 import string
22 import string
23 from collections import OrderedDict
23 from collections import OrderedDict
24
24
25 import deform
25 import deform
26 import deform.widget
26 import deform.widget
27 import logging
27 import logging
28 import requests
28 import requests
29 import requests.adapters
29 import requests.adapters
30 import colander
30 import colander
31 from celery.task import task
32 from requests.packages.urllib3.util.retry import Retry
31 from requests.packages.urllib3.util.retry import Retry
33
32
34 import rhodecode
33 import rhodecode
35 from rhodecode import events
34 from rhodecode import events
36 from rhodecode.translation import _
35 from rhodecode.translation import _
37 from rhodecode.integrations.types.base import IntegrationTypeBase
36 from rhodecode.integrations.types.base import IntegrationTypeBase
37 from rhodecode.lib.celerylib import async_task, RequestContextTask
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41 # updating this list requires updating the `common_vars` passed in the url-calling functions
42 # updating this list requires updating the `common_vars` passed in the url-calling functions
42 WEBHOOK_URL_VARS = [
43 WEBHOOK_URL_VARS = [
43 'repo_name',
44 'repo_name',
44 'repo_type',
45 'repo_type',
45 'repo_id',
46 'repo_id',
46 'repo_url',
47 'repo_url',
47 # extra repo fields
48 # extra repo fields
48 'extra:<extra_key_name>',
49 'extra:<extra_key_name>',
49
50
50 # special attrs below that we handle, using multi-call
51 # special attrs below that we handle, using multi-call
51 'branch',
52 'branch',
52 'commit_id',
53 'commit_id',
53
54
54 # pr events vars
55 # pr events vars
55 'pull_request_id',
56 'pull_request_id',
56 'pull_request_url',
57 'pull_request_url',
57
58
58 # user who triggers the call
59 # user who triggers the call
59 'username',
60 'username',
60 'user_id',
61 'user_id',
61
62
62 ]
63 ]
63 URL_VARS = ', '.join('${' + x + '}' for x in WEBHOOK_URL_VARS)
64 URL_VARS = ', '.join('${' + x + '}' for x in WEBHOOK_URL_VARS)
64
65
65
66
66 def get_auth(settings):
67 def get_auth(settings):
67 from requests.auth import HTTPBasicAuth
68 from requests.auth import HTTPBasicAuth
68 username = settings.get('username')
69 username = settings.get('username')
69 password = settings.get('password')
70 password = settings.get('password')
70 if username and password:
71 if username and password:
71 return HTTPBasicAuth(username, password)
72 return HTTPBasicAuth(username, password)
72 return None
73 return None
73
74
74
75
75 class WebhookHandler(object):
76 class WebhookHandler(object):
76 def __init__(self, template_url, secret_token, headers):
77 def __init__(self, template_url, secret_token, headers):
77 self.template_url = template_url
78 self.template_url = template_url
78 self.secret_token = secret_token
79 self.secret_token = secret_token
79 self.headers = headers
80 self.headers = headers
80
81
81 def get_base_parsed_template(self, data):
82 def get_base_parsed_template(self, data):
82 """
83 """
83 initially parses the passed in template with some common variables
84 initially parses the passed in template with some common variables
84 available on ALL calls
85 available on ALL calls
85 """
86 """
86 # note: make sure to update the `WEBHOOK_URL_VARS` if this changes
87 # note: make sure to update the `WEBHOOK_URL_VARS` if this changes
87 common_vars = {
88 common_vars = {
88 'repo_name': data['repo']['repo_name'],
89 'repo_name': data['repo']['repo_name'],
89 'repo_type': data['repo']['repo_type'],
90 'repo_type': data['repo']['repo_type'],
90 'repo_id': data['repo']['repo_id'],
91 'repo_id': data['repo']['repo_id'],
91 'repo_url': data['repo']['url'],
92 'repo_url': data['repo']['url'],
92 'username': data['actor']['username'],
93 'username': data['actor']['username'],
93 'user_id': data['actor']['user_id']
94 'user_id': data['actor']['user_id']
94 }
95 }
95 extra_vars = {}
96 extra_vars = {}
96 for extra_key, extra_val in data['repo']['extra_fields'].items():
97 for extra_key, extra_val in data['repo']['extra_fields'].items():
97 extra_vars['extra:{}'.format(extra_key)] = extra_val
98 extra_vars['extra:{}'.format(extra_key)] = extra_val
98 common_vars.update(extra_vars)
99 common_vars.update(extra_vars)
99
100
100 return string.Template(
101 return string.Template(
101 self.template_url).safe_substitute(**common_vars)
102 self.template_url).safe_substitute(**common_vars)
102
103
103 def repo_push_event_handler(self, event, data):
104 def repo_push_event_handler(self, event, data):
104 url = self.get_base_parsed_template(data)
105 url = self.get_base_parsed_template(data)
105 url_cals = []
106 url_cals = []
106 branch_data = OrderedDict()
107 branch_data = OrderedDict()
107 for obj in data['push']['branches']:
108 for obj in data['push']['branches']:
108 branch_data[obj['name']] = obj
109 branch_data[obj['name']] = obj
109
110
110 branches_commits = OrderedDict()
111 branches_commits = OrderedDict()
111 for commit in data['push']['commits']:
112 for commit in data['push']['commits']:
112 if commit['branch'] not in branches_commits:
113 if commit['branch'] not in branches_commits:
113 branch_commits = {'branch': branch_data[commit['branch']],
114 branch_commits = {'branch': branch_data[commit['branch']],
114 'commits': []}
115 'commits': []}
115 branches_commits[commit['branch']] = branch_commits
116 branches_commits[commit['branch']] = branch_commits
116
117
117 branch_commits = branches_commits[commit['branch']]
118 branch_commits = branches_commits[commit['branch']]
118 branch_commits['commits'].append(commit)
119 branch_commits['commits'].append(commit)
119
120
120 if '${branch}' in url:
121 if '${branch}' in url:
121 # call it multiple times, for each branch if used in variables
122 # call it multiple times, for each branch if used in variables
122 for branch, commit_ids in branches_commits.items():
123 for branch, commit_ids in branches_commits.items():
123 branch_url = string.Template(url).safe_substitute(branch=branch)
124 branch_url = string.Template(url).safe_substitute(branch=branch)
124 # call further down for each commit if used
125 # call further down for each commit if used
125 if '${commit_id}' in branch_url:
126 if '${commit_id}' in branch_url:
126 for commit_data in commit_ids['commits']:
127 for commit_data in commit_ids['commits']:
127 commit_id = commit_data['raw_id']
128 commit_id = commit_data['raw_id']
128 commit_url = string.Template(branch_url).safe_substitute(
129 commit_url = string.Template(branch_url).safe_substitute(
129 commit_id=commit_id)
130 commit_id=commit_id)
130 # register per-commit call
131 # register per-commit call
131 log.debug(
132 log.debug(
132 'register webhook call(%s) to url %s', event, commit_url)
133 'register webhook call(%s) to url %s', event, commit_url)
133 url_cals.append((commit_url, self.secret_token, self.headers, data))
134 url_cals.append((commit_url, self.secret_token, self.headers, data))
134
135
135 else:
136 else:
136 # register per-branch call
137 # register per-branch call
137 log.debug(
138 log.debug(
138 'register webhook call(%s) to url %s', event, branch_url)
139 'register webhook call(%s) to url %s', event, branch_url)
139 url_cals.append((branch_url, self.secret_token, self.headers, data))
140 url_cals.append((branch_url, self.secret_token, self.headers, data))
140
141
141 else:
142 else:
142 log.debug(
143 log.debug(
143 'register webhook call(%s) to url %s', event, url)
144 'register webhook call(%s) to url %s', event, url)
144 url_cals.append((url, self.secret_token, self.headers, data))
145 url_cals.append((url, self.secret_token, self.headers, data))
145
146
146 return url_cals
147 return url_cals
147
148
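For illustration (a sketch, with a hypothetical CI URL), this is how the ${branch} expansion above turns one push event into one call per branch; ${commit_id} nests the same way one level deeper::

    import string

    template_url = 'https://ci.example.invalid/build?repo=${repo_name}&branch=${branch}'

    # first pass: fill in the common variables, leave unknown placeholders untouched
    url = string.Template(template_url).safe_substitute(repo_name='my-repo')

    # second pass: one call per pushed branch
    for branch in ('default', 'stable'):
        print(string.Template(url).safe_substitute(branch=branch))
    # https://ci.example.invalid/build?repo=my-repo&branch=default
    # https://ci.example.invalid/build?repo=my-repo&branch=stable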
148 def repo_create_event_handler(self, event, data):
149 def repo_create_event_handler(self, event, data):
149 url = self.get_base_parsed_template(data)
150 url = self.get_base_parsed_template(data)
150 log.debug(
151 log.debug(
151 'register webhook call(%s) to url %s', event, url)
152 'register webhook call(%s) to url %s', event, url)
152 return [(url, self.secret_token, self.headers, data)]
153 return [(url, self.secret_token, self.headers, data)]
153
154
154 def pull_request_event_handler(self, event, data):
155 def pull_request_event_handler(self, event, data):
155 url = self.get_base_parsed_template(data)
156 url = self.get_base_parsed_template(data)
156 log.debug(
157 log.debug(
157 'register webhook call(%s) to url %s', event, url)
158 'register webhook call(%s) to url %s', event, url)
158 url = string.Template(url).safe_substitute(
159 url = string.Template(url).safe_substitute(
159 pull_request_id=data['pullrequest']['pull_request_id'],
160 pull_request_id=data['pullrequest']['pull_request_id'],
160 pull_request_url=data['pullrequest']['url'])
161 pull_request_url=data['pullrequest']['url'])
161 return [(url, self.secret_token, self.headers, data)]
162 return [(url, self.secret_token, self.headers, data)]
162
163
163 def __call__(self, event, data):
164 def __call__(self, event, data):
164 if isinstance(event, events.RepoPushEvent):
165 if isinstance(event, events.RepoPushEvent):
165 return self.repo_push_event_handler(event, data)
166 return self.repo_push_event_handler(event, data)
166 elif isinstance(event, events.RepoCreateEvent):
167 elif isinstance(event, events.RepoCreateEvent):
167 return self.repo_create_event_handler(event, data)
168 return self.repo_create_event_handler(event, data)
168 elif isinstance(event, events.PullRequestEvent):
169 elif isinstance(event, events.PullRequestEvent):
169 return self.pull_request_event_handler(event, data)
170 return self.pull_request_event_handler(event, data)
170 else:
171 else:
171 raise ValueError('event type not supported: %s' % event)
172 raise ValueError('event type not supported: %s' % event)
172
173
173
174
174 class WebhookSettingsSchema(colander.Schema):
175 class WebhookSettingsSchema(colander.Schema):
175 url = colander.SchemaNode(
176 url = colander.SchemaNode(
176 colander.String(),
177 colander.String(),
177 title=_('Webhook URL'),
178 title=_('Webhook URL'),
178 description=
179 description=
179 _('URL to which the Webhook should submit data. The following '
180 _('URL to which the Webhook should submit data. The following '
180 'variables can be used: {vars}. Some variables, like ${{branch}} '
181 'variables can be used: {vars}. Some variables, like ${{branch}} '
181 'or ${{commit_id}}, trigger multiple calls: the Webhook is '
182 'or ${{commit_id}}, trigger multiple calls: the Webhook is '
182 'called once for each unique object in '
183 'called once for each unique object in '
183 'the data.').format(vars=URL_VARS),
184 'the data.').format(vars=URL_VARS),
184 missing=colander.required,
185 missing=colander.required,
185 required=True,
186 required=True,
186 validator=colander.url,
187 validator=colander.url,
187 widget=deform.widget.TextInputWidget(
188 widget=deform.widget.TextInputWidget(
188 placeholder='https://www.example.com/webhook'
189 placeholder='https://www.example.com/webhook'
189 ),
190 ),
190 )
191 )
191 secret_token = colander.SchemaNode(
192 secret_token = colander.SchemaNode(
192 colander.String(),
193 colander.String(),
193 title=_('Secret Token'),
194 title=_('Secret Token'),
194 description=_('Optional string used to validate received payloads. '
195 description=_('Optional string used to validate received payloads. '
195 'It will be sent together with event data in JSON'),
196 'It will be sent together with event data in JSON'),
196 default='',
197 default='',
197 missing='',
198 missing='',
198 widget=deform.widget.TextInputWidget(
199 widget=deform.widget.TextInputWidget(
199 placeholder='e.g. secret_token'
200 placeholder='e.g. secret_token'
200 ),
201 ),
201 )
202 )
202 username = colander.SchemaNode(
203 username = colander.SchemaNode(
203 colander.String(),
204 colander.String(),
204 title=_('Username'),
205 title=_('Username'),
205 description=_('Optional username to authenticate the call.'),
206 description=_('Optional username to authenticate the call.'),
206 default='',
207 default='',
207 missing='',
208 missing='',
208 widget=deform.widget.TextInputWidget(
209 widget=deform.widget.TextInputWidget(
209 placeholder='e.g. admin'
210 placeholder='e.g. admin'
210 ),
211 ),
211 )
212 )
212 password = colander.SchemaNode(
213 password = colander.SchemaNode(
213 colander.String(),
214 colander.String(),
214 title=_('Password'),
215 title=_('Password'),
215 description=_('Optional password to authenticate the call.'),
216 description=_('Optional password to authenticate the call.'),
216 default='',
217 default='',
217 missing='',
218 missing='',
218 widget=deform.widget.PasswordWidget(
219 widget=deform.widget.PasswordWidget(
219 placeholder='e.g. secret.',
220 placeholder='e.g. secret.',
220 redisplay=True,
221 redisplay=True,
221 ),
222 ),
222 )
223 )
223 custom_header_key = colander.SchemaNode(
224 custom_header_key = colander.SchemaNode(
224 colander.String(),
225 colander.String(),
225 title=_('Custom Header Key'),
226 title=_('Custom Header Key'),
226 description=_('Custom Header name to be set when calling endpoint.'),
227 description=_('Custom Header name to be set when calling endpoint.'),
227 default='',
228 default='',
228 missing='',
229 missing='',
229 widget=deform.widget.TextInputWidget(
230 widget=deform.widget.TextInputWidget(
230 placeholder='e.g. Authorization'
231 placeholder='e.g. Authorization'
231 ),
232 ),
232 )
233 )
233 custom_header_val = colander.SchemaNode(
234 custom_header_val = colander.SchemaNode(
234 colander.String(),
235 colander.String(),
235 title=_('Custom Header Value'),
236 title=_('Custom Header Value'),
236 description=_('Custom Header value to be set when calling endpoint.'),
237 description=_('Custom Header value to be set when calling endpoint.'),
237 default='',
238 default='',
238 missing='',
239 missing='',
239 widget=deform.widget.TextInputWidget(
240 widget=deform.widget.TextInputWidget(
240 placeholder='e.g. RcLogin auth=xxxx'
241 placeholder='e.g. RcLogin auth=xxxx'
241 ),
242 ),
242 )
243 )
243 method_type = colander.SchemaNode(
244 method_type = colander.SchemaNode(
244 colander.String(),
245 colander.String(),
245 title=_('Call Method'),
246 title=_('Call Method'),
246 description=_('Select if the Webhook call should be made '
247 description=_('Select if the Webhook call should be made '
247 'with POST or GET.'),
248 'with POST or GET.'),
248 default='post',
249 default='post',
249 missing='',
250 missing='',
250 widget=deform.widget.RadioChoiceWidget(
251 widget=deform.widget.RadioChoiceWidget(
251 values=[('get', 'GET'), ('post', 'POST')],
252 values=[('get', 'GET'), ('post', 'POST')],
252 inline=True
253 inline=True
253 ),
254 ),
254 )
255 )
255
256
256
257
257 class WebhookIntegrationType(IntegrationTypeBase):
258 class WebhookIntegrationType(IntegrationTypeBase):
258 key = 'webhook'
259 key = 'webhook'
259 display_name = _('Webhook')
260 display_name = _('Webhook')
260 description = _('Post json events to a Webhook endpoint')
261 description = _('Post json events to a Webhook endpoint')
261 icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>'''
262 icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>'''
262
263
263 valid_events = [
264 valid_events = [
264 events.PullRequestCloseEvent,
265 events.PullRequestCloseEvent,
265 events.PullRequestMergeEvent,
266 events.PullRequestMergeEvent,
266 events.PullRequestUpdateEvent,
267 events.PullRequestUpdateEvent,
267 events.PullRequestCommentEvent,
268 events.PullRequestCommentEvent,
268 events.PullRequestReviewEvent,
269 events.PullRequestReviewEvent,
269 events.PullRequestCreateEvent,
270 events.PullRequestCreateEvent,
270 events.RepoPushEvent,
271 events.RepoPushEvent,
271 events.RepoCreateEvent,
272 events.RepoCreateEvent,
272 ]
273 ]
273
274
274 def settings_schema(self):
275 def settings_schema(self):
275 schema = WebhookSettingsSchema()
276 schema = WebhookSettingsSchema()
276 schema.add(colander.SchemaNode(
277 schema.add(colander.SchemaNode(
277 colander.Set(),
278 colander.Set(),
278 widget=deform.widget.CheckboxChoiceWidget(
279 widget=deform.widget.CheckboxChoiceWidget(
279 values=sorted(
280 values=sorted(
280 [(e.name, e.display_name) for e in self.valid_events]
281 [(e.name, e.display_name) for e in self.valid_events]
281 )
282 )
282 ),
283 ),
283 description="Events activated for this integration",
284 description="Events activated for this integration",
284 name='events'
285 name='events'
285 ))
286 ))
286 return schema
287 return schema
287
288
288 def send_event(self, event):
289 def send_event(self, event):
289 log.debug('handling event %s with Webhook integration %s',
290 log.debug('handling event %s with Webhook integration %s',
290 event.name, self)
291 event.name, self)
291
292
292 if event.__class__ not in self.valid_events:
293 if event.__class__ not in self.valid_events:
293 log.debug('event not valid: %r' % event)
294 log.debug('event not valid: %r' % event)
294 return
295 return
295
296
296 if event.name not in self.settings['events']:
297 if event.name not in self.settings['events']:
297 log.debug('event ignored: %r' % event)
298 log.debug('event ignored: %r' % event)
298 return
299 return
299
300
300 data = event.as_dict()
301 data = event.as_dict()
301 template_url = self.settings['url']
302 template_url = self.settings['url']
302
303
303 headers = {}
304 headers = {}
304 head_key = self.settings.get('custom_header_key')
305 head_key = self.settings.get('custom_header_key')
305 head_val = self.settings.get('custom_header_val')
306 head_val = self.settings.get('custom_header_val')
306 if head_key and head_val:
307 if head_key and head_val:
307 headers = {head_key: head_val}
308 headers = {head_key: head_val}
308
309
309 handler = WebhookHandler(
310 handler = WebhookHandler(
310 template_url, self.settings['secret_token'], headers)
311 template_url, self.settings['secret_token'], headers)
311
312
312 url_calls = handler(event, data)
313 url_calls = handler(event, data)
313 log.debug('webhook: calling following urls: %s',
314 log.debug('webhook: calling following urls: %s',
314 [x[0] for x in url_calls])
315 [x[0] for x in url_calls])
315 post_to_webhook(url_calls, self.settings)
316 post_to_webhook(url_calls, self.settings)
316
317
317
318
318 @task(ignore_result=True)
319 @async_task(ignore_result=True, base=RequestContextTask)
319 def post_to_webhook(url_calls, settings):
320 def post_to_webhook(url_calls, settings):
320 max_retries = 3
321 max_retries = 3
321 retries = Retry(
322 retries = Retry(
322 total=max_retries,
323 total=max_retries,
323 backoff_factor=0.15,
324 backoff_factor=0.15,
324 status_forcelist=[500, 502, 503, 504])
325 status_forcelist=[500, 502, 503, 504])
325 call_headers = {
326 call_headers = {
326 'User-Agent': 'RhodeCode-webhook-caller/{}'.format(
327 'User-Agent': 'RhodeCode-webhook-caller/{}'.format(
327 rhodecode.__version__)
328 rhodecode.__version__)
328 } # updated below with custom ones, allows override
329 } # updated below with custom ones, allows override
329
330
330 for url, token, headers, data in url_calls:
331 for url, token, headers, data in url_calls:
331 req_session = requests.Session()
332 req_session = requests.Session()
332 req_session.mount( # retry max N times
333 req_session.mount( # retry max N times
333 'http://', requests.adapters.HTTPAdapter(max_retries=retries))
334 'http://', requests.adapters.HTTPAdapter(max_retries=retries))
334
335
335 method = settings.get('method_type') or 'post'
336 method = settings.get('method_type') or 'post'
336 call_method = getattr(req_session, method)
337 call_method = getattr(req_session, method)
337
338
338 headers = headers or {}
339 headers = headers or {}
339 call_headers.update(headers)
340 call_headers.update(headers)
340 auth = get_auth(settings)
341 auth = get_auth(settings)
341
342
342 log.debug('calling Webhook with method: %s, and auth:%s',
343 log.debug('calling Webhook with method: %s, and auth:%s',
343 call_method, auth)
344 call_method, auth)
344 resp = call_method(url, json={
345 resp = call_method(url, json={
345 'token': token,
346 'token': token,
346 'event': data
347 'event': data
347 }, headers=call_headers, auth=auth)
348 }, headers=call_headers, auth=auth)
348 log.debug('Got Webhook response: %s', resp)
349 log.debug('Got Webhook response: %s', resp)
349
350
350 resp.raise_for_status() # raise exception on a failed request
351 resp.raise_for_status() # raise exception on a failed request
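A sketch of the retry wiring used by `post_to_webhook` (same Retry values as above). Note that the changeset mounts the adapter for 'http://' only; mounting it for 'https://' as well, as shown here, would extend the retry policy to TLS endpoints, so treat that extra line as a suggestion rather than part of the commit. The endpoint URL is hypothetical::

    import requests
    import requests.adapters
    from requests.packages.urllib3.util.retry import Retry

    retries = Retry(total=3, backoff_factor=0.15,
                    status_forcelist=[500, 502, 503, 504])
    adapter = requests.adapters.HTTPAdapter(max_retries=retries)

    req_session = requests.Session()
    req_session.mount('http://', adapter)
    req_session.mount('https://', adapter)  # not in the changeset, see note above

    resp = req_session.post('http://hooks.example.invalid/endpoint',
                            json={'token': 'secret', 'event': {}},
                            headers={'User-Agent': 'RhodeCode-webhook-caller/x.y.z'})
    resp.raise_for_status()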
@@ -1,236 +1,72 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 """
21 celery libs for RhodeCode
22 """
23
20
24
25 import pylons
26 import socket
21 import socket
27 import logging
22 import logging
28
23
29 import rhodecode
24 import rhodecode
30
25 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from os.path import join as jn
26 from rhodecode.lib.celerylib.loader import (
32 from pylons import config
27 celery_app, RequestContextTask, get_logger)
33 from celery.task import Task
34 from pyramid.request import Request
35 from pyramid.scripting import prepare
36 from pyramid.threadlocal import get_current_request
37
38 from decorator import decorator
39
28
40 from zope.cachedescriptors.property import Lazy as LazyProperty
29 async_task = celery_app.task
41
30
42 from rhodecode.config import utils
43 from rhodecode.lib.utils2 import (
44 safe_str, md5_safe, aslist, get_routes_generator_for_server_url,
45 get_server_url)
46 from rhodecode.lib.pidlock import DaemonLock, LockHeld
47 from rhodecode.lib.vcs import connect_vcs
48 from rhodecode.model import meta
49 from rhodecode.lib.auth import AuthUser
50
31
51 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
52
33
53
34
54 class ResultWrapper(object):
35 class ResultWrapper(object):
55 def __init__(self, task):
36 def __init__(self, task):
56 self.task = task
37 self.task = task
57
38
58 @LazyProperty
39 @LazyProperty
59 def result(self):
40 def result(self):
60 return self.task
41 return self.task
61
42
62
43
63 class RhodecodeCeleryTask(Task):
64 """
65 This is a celery task which will create a rhodecode app instance context
66 for the task, patch pyramid + pylons threadlocals with the original request
67 that created the task and also add the user to the context.
68
69 This class as a whole should be removed once the pylons port is complete
70 and a pyramid only solution for celery is implemented as per issue #4139
71 """
72
73 def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
74 link=None, link_error=None, **options):
75 """ queue the job to run (we are in web request context here) """
76
77 request = get_current_request()
78
79 if hasattr(request, 'user'):
80 ip_addr = request.user.ip_addr
81 user_id = request.user.user_id
82 elif hasattr(request, 'rpc_params'):
83 # TODO(marcink) remove when migration is finished
84 # api specific call on Pyramid.
85 ip_addr = request.rpc_params['apiuser'].ip_addr
86 user_id = request.rpc_params['apiuser'].user_id
87 else:
88 raise Exception('Unable to fetch data from request: {}'.format(
89 request))
90
91 if request:
92 # we hook into kwargs since it is the only way to pass our data to
93 # the celery worker in celery 2.2
94 kwargs.update({
95 '_rhodecode_proxy_data': {
96 'environ': {
97 'PATH_INFO': request.environ['PATH_INFO'],
98 'SCRIPT_NAME': request.environ['SCRIPT_NAME'],
99 'HTTP_HOST': request.environ.get('HTTP_HOST',
100 request.environ['SERVER_NAME']),
101 'SERVER_NAME': request.environ['SERVER_NAME'],
102 'SERVER_PORT': request.environ['SERVER_PORT'],
103 'wsgi.url_scheme': request.environ['wsgi.url_scheme'],
104 },
105 'auth_user': {
106 'ip_addr': ip_addr,
107 'user_id': user_id
108 },
109 }
110 })
111 return super(RhodecodeCeleryTask, self).apply_async(
112 args, kwargs, task_id, producer, link, link_error, **options)
113
114 def __call__(self, *args, **kwargs):
115 """ rebuild the context and then run task on celery worker """
116 proxy_data = kwargs.pop('_rhodecode_proxy_data', {})
117
118 if not proxy_data:
119 return super(RhodecodeCeleryTask, self).__call__(*args, **kwargs)
120
121 log.debug('using celery proxy data to run task: %r', proxy_data)
122
123 from rhodecode.config.routing import make_map
124
125 request = Request.blank('/', environ=proxy_data['environ'])
126 request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'],
127 ip_addr=proxy_data['auth_user']['ip_addr'])
128
129 pyramid_request = prepare(request) # set pyramid threadlocal request
130
131 # pylons routing
132 if not rhodecode.CONFIG.get('routes.map'):
133 rhodecode.CONFIG['routes.map'] = make_map(config)
134 pylons.url._push_object(get_routes_generator_for_server_url(
135 get_server_url(request.environ)
136 ))
137
138 try:
139 return super(RhodecodeCeleryTask, self).__call__(*args, **kwargs)
140 finally:
141 pyramid_request['closer']()
142 pylons.url._pop_object()
143
144
145 def run_task(task, *args, **kwargs):
44 def run_task(task, *args, **kwargs):
146 if rhodecode.CELERY_ENABLED:
45 if rhodecode.CELERY_ENABLED:
147 celery_is_up = False
46 celery_is_up = False
148 try:
47 try:
149 t = task.apply_async(args=args, kwargs=kwargs)
48 t = task.apply_async(args=args, kwargs=kwargs)
150 log.info('running task %s:%s', t.task_id, task)
151 celery_is_up = True
49 celery_is_up = True
50 log.debug('executing task %s:%s in async mode', t.task_id, task)
152 return t
51 return t
153
52
154 except socket.error as e:
53 except socket.error as e:
155 if isinstance(e, IOError) and e.errno == 111:
54 if isinstance(e, IOError) and e.errno == 111:
156 log.error('Unable to connect to celeryd. Sync execution')
55 log.error('Unable to connect to celeryd. Sync execution')
157 else:
56 else:
158 log.exception("Exception while connecting to celeryd.")
57 log.exception("Exception while connecting to celeryd.")
159 except KeyError as e:
58 except KeyError as e:
160 log.error('Unable to connect to celeryd. Sync execution')
59 log.error('Unable to connect to celeryd. Sync execution')
161 except Exception as e:
60 except Exception as e:
162 log.exception(
61 log.exception(
163 "Exception while trying to run task asynchronous. "
62 "Exception while trying to run task asynchronous. "
164 "Fallback to sync execution.")
63 "Fallback to sync execution.")
165
64
166 # keep in mind there may be a subtle race condition where something
65 # keep in mind there may be a subtle race condition where something
167 # depending on rhodecode.CELERY_ENABLED such as @dbsession decorator
66 # depending on rhodecode.CELERY_ENABLED
168 # will see CELERY_ENABLED as True before this has a chance to set False
67 # will see CELERY_ENABLED as True before this has a chance to set False
169 rhodecode.CELERY_ENABLED = celery_is_up
68 rhodecode.CELERY_ENABLED = celery_is_up
170 else:
69 else:
171 log.debug('executing task %s in sync mode', task)
70 log.debug('executing task %s:%s in sync mode', 'TASK', task)
172 return ResultWrapper(task(*args, **kwargs))
173
174
175 def __get_lockkey(func, *fargs, **fkwargs):
176 params = list(fargs)
177 params.extend(['%s-%s' % ar for ar in fkwargs.items()])
178
179 func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
180 _lock_key = func_name + '-' + '-'.join(map(safe_str, params))
181 return 'task_%s.lock' % (md5_safe(_lock_key),)
182
183
184 def locked_task(func):
185 def __wrapper(func, *fargs, **fkwargs):
186 lockkey = __get_lockkey(func, *fargs, **fkwargs)
187 lockkey_path = config['app_conf']['cache_dir']
188
189 log.info('running task with lockkey %s' % lockkey)
190 try:
191 l = DaemonLock(file_=jn(lockkey_path, lockkey))
192 ret = func(*fargs, **fkwargs)
193 l.release()
194 return ret
195 except LockHeld:
196 log.info('LockHeld')
197 return 'Task with key %s already running' % lockkey
198
199 return decorator(__wrapper, func)
200
201
71
202 def get_session():
72 return ResultWrapper(task(*args, **kwargs))
203 if rhodecode.CELERY_ENABLED:
204 utils.initialize_database(config)
205 sa = meta.Session()
206 return sa
207
208
209 def dbsession(func):
210 def __wrapper(func, *fargs, **fkwargs):
211 try:
212 ret = func(*fargs, **fkwargs)
213 return ret
214 finally:
215 if rhodecode.CELERY_ENABLED and not rhodecode.CELERY_EAGER:
216 meta.Session.remove()
217
218 return decorator(__wrapper, func)
219
220
221 def vcsconnection(func):
222 def __wrapper(func, *fargs, **fkwargs):
223 if rhodecode.CELERY_ENABLED and not rhodecode.CELERY_EAGER:
224 settings = rhodecode.PYRAMID_SETTINGS
225 backends = settings['vcs.backends']
226 for alias in rhodecode.BACKENDS.keys():
227 if alias not in backends:
228 del rhodecode.BACKENDS[alias]
229 utils.configure_vcs(settings)
230 connect_vcs(
231 settings['vcs.server'],
232 utils.get_vcs_server_protocol(settings))
233 ret = func(*fargs, **fkwargs)
234 return ret
235
236 return decorator(__wrapper, func)
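A hedged sketch of the new `run_task` semantics shown above, using a hypothetical `ping` task: with a reachable broker the call is queued and a celery AsyncResult comes back; if the broker connection fails (or CELERY_ENABLED is off), the task body runs in-process and a `ResultWrapper` carrying the plain return value is returned instead::

    from rhodecode.lib.celerylib import async_task, run_task

    @async_task(ignore_result=False)
    def ping(payload):
        # hypothetical task, used only for illustration
        return 'pong: %s' % payload

    result = run_task(ping, 'hello')
    # async mode  -> AsyncResult (fetch the value with result.get())
    # sync fallback -> ResultWrapper, whose .result is simply 'pong: hello'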
@@ -1,293 +1,275 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2017 RhodeCode GmbH
3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 RhodeCode task modules, containing all tasks that are supposed to be run
22 RhodeCode task modules, containing all tasks that are supposed to be run
23 by the celery daemon
23 by the celery daemon
24 """
24 """
25
25
26
27 import os
26 import os
28 import logging
29
30 from celery.task import task
31
27
32 import rhodecode
28 import rhodecode
33 from rhodecode.lib import audit_logger
29 from rhodecode.lib import audit_logger
34 from rhodecode.lib.celerylib import (
30 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask
35 run_task, dbsession, __get_lockkey, LockHeld, DaemonLock,
36 get_session, vcsconnection, RhodecodeCeleryTask)
37 from rhodecode.lib.hooks_base import log_create_repository
31 from rhodecode.lib.hooks_base import log_create_repository
38 from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
32 from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
39 from rhodecode.lib.utils import add_cache
40 from rhodecode.lib.utils2 import safe_int, str2bool
33 from rhodecode.lib.utils2 import safe_int, str2bool
41 from rhodecode.model.db import Repository, User
34 from rhodecode.model.db import Session, Repository, User
42
35
43
36
44 def get_logger(cls):
37 @async_task(ignore_result=True, base=RequestContextTask)
45 if rhodecode.CELERY_ENABLED:
46 try:
47 log = cls.get_logger()
48 except Exception:
49 log = logging.getLogger(__name__)
50 else:
51 log = logging.getLogger(__name__)
52
53 return log
54
55
56 @task(ignore_result=True, base=RhodecodeCeleryTask)
57 @dbsession
58 def send_email(recipients, subject, body='', html_body='', email_config=None):
38 def send_email(recipients, subject, body='', html_body='', email_config=None):
59 """
39 """
60 Sends an email with defined parameters from the .ini files.
40 Sends an email with defined parameters from the .ini files.
61
41
62 :param recipients: list of recipients; if this is empty, the email
42 :param recipients: list of recipients; if this is empty, the email
63 address defined in the 'email_to' field is used instead
43 address defined in the 'email_to' field is used instead
64 :param subject: subject of the mail
44 :param subject: subject of the mail
65 :param body: body of the mail
45 :param body: body of the mail
66 :param html_body: html version of body
46 :param html_body: html version of body
67 """
47 """
68 log = get_logger(send_email)
48 log = get_logger(send_email)
69
49
70 email_config = email_config or rhodecode.CONFIG
50 email_config = email_config or rhodecode.CONFIG
71 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
51 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
72 if not recipients:
52 if not recipients:
73 # if recipients are not defined we send to email_config + all admins
53 # if recipients are not defined we send to email_config + all admins
74 admins = [
54 admins = [
75 u.email for u in User.query().filter(User.admin == True).all()]
55 u.email for u in User.query().filter(User.admin == True).all()]
76 recipients = [email_config.get('email_to')] + admins
56 recipients = [email_config.get('email_to')] + admins
77
57
78 mail_server = email_config.get('smtp_server') or None
58 mail_server = email_config.get('smtp_server') or None
79 if mail_server is None:
59 if mail_server is None:
80 log.error("SMTP server information missing. Sending email failed. "
60 log.error("SMTP server information missing. Sending email failed. "
81 "Make sure that `smtp_server` variable is configured "
61 "Make sure that `smtp_server` variable is configured "
82 "inside the .ini file")
62 "inside the .ini file")
83 return False
63 return False
84
64
85 mail_from = email_config.get('app_email_from', 'RhodeCode')
65 mail_from = email_config.get('app_email_from', 'RhodeCode')
86 user = email_config.get('smtp_username')
66 user = email_config.get('smtp_username')
87 passwd = email_config.get('smtp_password')
67 passwd = email_config.get('smtp_password')
88 mail_port = email_config.get('smtp_port')
68 mail_port = email_config.get('smtp_port')
89 tls = str2bool(email_config.get('smtp_use_tls'))
69 tls = str2bool(email_config.get('smtp_use_tls'))
90 ssl = str2bool(email_config.get('smtp_use_ssl'))
70 ssl = str2bool(email_config.get('smtp_use_ssl'))
91 debug = str2bool(email_config.get('debug'))
71 debug = str2bool(email_config.get('debug'))
92 smtp_auth = email_config.get('smtp_auth')
72 smtp_auth = email_config.get('smtp_auth')
93
73
94 try:
74 try:
95 m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
75 m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
96 mail_port, ssl, tls, debug=debug)
76 mail_port, ssl, tls, debug=debug)
97 m.send(recipients, subject, body, html_body)
77 m.send(recipients, subject, body, html_body)
98 except Exception:
78 except Exception:
99 log.exception('Mail sending failed')
79 log.exception('Mail sending failed')
100 return False
80 return False
101 return True
81 return True
102
82
103
83
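For example (a sketch; the recipient address and the `tasks` module alias are assumptions for illustration), the mail task is dispatched through `run_task` so it degrades to a synchronous send when no worker is available::

    from rhodecode.lib.celerylib import run_task
    from rhodecode.lib.celerylib import tasks  # assuming the task module lives at this path

    run_task(tasks.send_email,
             ['admin@example.invalid'],        # hypothetical recipient
             'test subject',
             body='plain text body',
             html_body='<b>html body</b>')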
104 @task(ignore_result=True, base=RhodecodeCeleryTask)
84 @async_task(ignore_result=True, base=RequestContextTask)
105 @dbsession
106 @vcsconnection
107 def create_repo(form_data, cur_user):
85 def create_repo(form_data, cur_user):
108 from rhodecode.model.repo import RepoModel
86 from rhodecode.model.repo import RepoModel
109 from rhodecode.model.user import UserModel
87 from rhodecode.model.user import UserModel
110 from rhodecode.model.settings import SettingsModel
88 from rhodecode.model.settings import SettingsModel
111
89
112 log = get_logger(create_repo)
90 log = get_logger(create_repo)
113 DBS = get_session()
114
91
115 cur_user = UserModel(DBS)._get_user(cur_user)
92 cur_user = UserModel()._get_user(cur_user)
116 owner = cur_user
93 owner = cur_user
117
94
118 repo_name = form_data['repo_name']
95 repo_name = form_data['repo_name']
119 repo_name_full = form_data['repo_name_full']
96 repo_name_full = form_data['repo_name_full']
120 repo_type = form_data['repo_type']
97 repo_type = form_data['repo_type']
121 description = form_data['repo_description']
98 description = form_data['repo_description']
122 private = form_data['repo_private']
99 private = form_data['repo_private']
123 clone_uri = form_data.get('clone_uri')
100 clone_uri = form_data.get('clone_uri')
124 repo_group = safe_int(form_data['repo_group'])
101 repo_group = safe_int(form_data['repo_group'])
125 landing_rev = form_data['repo_landing_rev']
102 landing_rev = form_data['repo_landing_rev']
126 copy_fork_permissions = form_data.get('copy_permissions')
103 copy_fork_permissions = form_data.get('copy_permissions')
127 copy_group_permissions = form_data.get('repo_copy_permissions')
104 copy_group_permissions = form_data.get('repo_copy_permissions')
128 fork_of = form_data.get('fork_parent_id')
105 fork_of = form_data.get('fork_parent_id')
129 state = form_data.get('repo_state', Repository.STATE_PENDING)
106 state = form_data.get('repo_state', Repository.STATE_PENDING)
130
107
131 # repo creation defaults, private and repo_type are filled in form
108 # repo creation defaults, private and repo_type are filled in form
132 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
109 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
133 enable_statistics = form_data.get(
110 enable_statistics = form_data.get(
134 'enable_statistics', defs.get('repo_enable_statistics'))
111 'enable_statistics', defs.get('repo_enable_statistics'))
135 enable_locking = form_data.get(
112 enable_locking = form_data.get(
136 'enable_locking', defs.get('repo_enable_locking'))
113 'enable_locking', defs.get('repo_enable_locking'))
137 enable_downloads = form_data.get(
114 enable_downloads = form_data.get(
138 'enable_downloads', defs.get('repo_enable_downloads'))
115 'enable_downloads', defs.get('repo_enable_downloads'))
139
116
140 try:
117 try:
141 repo = RepoModel(DBS)._create_repo(
118 repo = RepoModel()._create_repo(
142 repo_name=repo_name_full,
119 repo_name=repo_name_full,
143 repo_type=repo_type,
120 repo_type=repo_type,
144 description=description,
121 description=description,
145 owner=owner,
122 owner=owner,
146 private=private,
123 private=private,
147 clone_uri=clone_uri,
124 clone_uri=clone_uri,
148 repo_group=repo_group,
125 repo_group=repo_group,
149 landing_rev=landing_rev,
126 landing_rev=landing_rev,
150 fork_of=fork_of,
127 fork_of=fork_of,
151 copy_fork_permissions=copy_fork_permissions,
128 copy_fork_permissions=copy_fork_permissions,
152 copy_group_permissions=copy_group_permissions,
129 copy_group_permissions=copy_group_permissions,
153 enable_statistics=enable_statistics,
130 enable_statistics=enable_statistics,
154 enable_locking=enable_locking,
131 enable_locking=enable_locking,
155 enable_downloads=enable_downloads,
132 enable_downloads=enable_downloads,
156 state=state
133 state=state
157 )
134 )
158 DBS.commit()
135 Session().commit()
159
136
160 # now create this repo on Filesystem
137 # now create this repo on Filesystem
161 RepoModel(DBS)._create_filesystem_repo(
138 RepoModel()._create_filesystem_repo(
162 repo_name=repo_name,
139 repo_name=repo_name,
163 repo_type=repo_type,
140 repo_type=repo_type,
164 repo_group=RepoModel(DBS)._get_repo_group(repo_group),
141 repo_group=RepoModel()._get_repo_group(repo_group),
165 clone_uri=clone_uri,
142 clone_uri=clone_uri,
166 )
143 )
167 repo = Repository.get_by_repo_name(repo_name_full)
144 repo = Repository.get_by_repo_name(repo_name_full)
168 log_create_repository(created_by=owner.username, **repo.get_dict())
145 log_create_repository(created_by=owner.username, **repo.get_dict())
169
146
170 # update repo commit caches initially
147 # update repo commit caches initially
171 repo.update_commit_cache()
148 repo.update_commit_cache()
172
149
173 # set new created state
150 # set new created state
174 repo.set_state(Repository.STATE_CREATED)
151 repo.set_state(Repository.STATE_CREATED)
175 repo_id = repo.repo_id
152 repo_id = repo.repo_id
176 repo_data = repo.get_api_data()
153 repo_data = repo.get_api_data()
177
154
178 audit_logger.store(
155 audit_logger.store(
179 'repo.create', action_data={'data': repo_data},
156 'repo.create', action_data={'data': repo_data},
180 user=cur_user,
157 user=cur_user,
181 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
158 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
182
159
183 DBS.commit()
160 Session().commit()
184 except Exception:
161 except Exception:
185 log.warning('Exception occurred when creating repository, '
162 log.warning('Exception occurred when creating repository, '
186 'doing cleanup...', exc_info=True)
163 'doing cleanup...', exc_info=True)
187 # rollback things manually !
164 # rollback things manually !
188 repo = Repository.get_by_repo_name(repo_name_full)
165 repo = Repository.get_by_repo_name(repo_name_full)
189 if repo:
166 if repo:
190 Repository.delete(repo.repo_id)
167 Repository.delete(repo.repo_id)
191 DBS.commit()
168 Session().commit()
192 RepoModel(DBS)._delete_filesystem_repo(repo)
169 RepoModel()._delete_filesystem_repo(repo)
193 raise
170 raise
194
171
195 # it's an odd fix to make celery fail the task when an exception occurs
172 # it's an odd fix to make celery fail the task when an exception occurs
196 def on_failure(self, *args, **kwargs):
173 def on_failure(self, *args, **kwargs):
197 pass
174 pass
198
175
199 return True
176 return True
200
177
201
178
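A hedged sketch of dispatching the task above with standard Celery call semantics; the form_data keys mirror the ones read in the function body, all values are placeholders, and 'admin' stands in for any valid user name or id:

    # illustrative payload only - real callers build this from the repo-creation form
    form_data = {
        'repo_name': 'myrepo',
        'repo_name_full': 'mygroup/myrepo',
        'repo_type': 'git',
        'repo_description': 'demo repository',
        'repo_private': False,
        'repo_group': None,
        'repo_landing_rev': 'rev:tip',
    }
    create_repo(form_data, cur_user='admin')              # run in-process
    # create_repo.apply_async(args=(form_data, 'admin'))  # or hand off to a worker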
202 @task(ignore_result=True, base=RhodecodeCeleryTask)
179 @async_task(ignore_result=True, base=RequestContextTask)
203 @dbsession
204 @vcsconnection
205 def create_repo_fork(form_data, cur_user):
180 def create_repo_fork(form_data, cur_user):
206 """
181 """
207 Creates a fork of a repository using internal VCS methods
182 Creates a fork of a repository using internal VCS methods
208
209 :param form_data:
210 :param cur_user:
211 """
183 """
212 from rhodecode.model.repo import RepoModel
184 from rhodecode.model.repo import RepoModel
213 from rhodecode.model.user import UserModel
185 from rhodecode.model.user import UserModel
214
186
215 log = get_logger(create_repo_fork)
187 log = get_logger(create_repo_fork)
216 DBS = get_session()
217
188
218 cur_user = UserModel(DBS)._get_user(cur_user)
189 cur_user = UserModel()._get_user(cur_user)
219 owner = cur_user
190 owner = cur_user
220
191
221 repo_name = form_data['repo_name'] # fork in this case
192 repo_name = form_data['repo_name'] # fork in this case
222 repo_name_full = form_data['repo_name_full']
193 repo_name_full = form_data['repo_name_full']
223 repo_type = form_data['repo_type']
194 repo_type = form_data['repo_type']
224 description = form_data['description']
195 description = form_data['description']
225 private = form_data['private']
196 private = form_data['private']
226 clone_uri = form_data.get('clone_uri')
197 clone_uri = form_data.get('clone_uri')
227 repo_group = safe_int(form_data['repo_group'])
198 repo_group = safe_int(form_data['repo_group'])
228 landing_rev = form_data['landing_rev']
199 landing_rev = form_data['landing_rev']
229 copy_fork_permissions = form_data.get('copy_permissions')
200 copy_fork_permissions = form_data.get('copy_permissions')
230 fork_id = safe_int(form_data.get('fork_parent_id'))
201 fork_id = safe_int(form_data.get('fork_parent_id'))
231
202
232 try:
203 try:
233 fork_of = RepoModel(DBS)._get_repo(fork_id)
204 fork_of = RepoModel()._get_repo(fork_id)
234 RepoModel(DBS)._create_repo(
205 RepoModel()._create_repo(
235 repo_name=repo_name_full,
206 repo_name=repo_name_full,
236 repo_type=repo_type,
207 repo_type=repo_type,
237 description=description,
208 description=description,
238 owner=owner,
209 owner=owner,
239 private=private,
210 private=private,
240 clone_uri=clone_uri,
211 clone_uri=clone_uri,
241 repo_group=repo_group,
212 repo_group=repo_group,
242 landing_rev=landing_rev,
213 landing_rev=landing_rev,
243 fork_of=fork_of,
214 fork_of=fork_of,
244 copy_fork_permissions=copy_fork_permissions
215 copy_fork_permissions=copy_fork_permissions
245 )
216 )
246
217
247 DBS.commit()
218 Session().commit()
248
219
249 base_path = Repository.base_path()
220 base_path = Repository.base_path()
250 source_repo_path = os.path.join(base_path, fork_of.repo_name)
221 source_repo_path = os.path.join(base_path, fork_of.repo_name)
251
222
252 # now create this repo on Filesystem
223 # now create this repo on Filesystem
253 RepoModel(DBS)._create_filesystem_repo(
224 RepoModel()._create_filesystem_repo(
254 repo_name=repo_name,
225 repo_name=repo_name,
255 repo_type=repo_type,
226 repo_type=repo_type,
256 repo_group=RepoModel(DBS)._get_repo_group(repo_group),
227 repo_group=RepoModel()._get_repo_group(repo_group),
257 clone_uri=source_repo_path,
228 clone_uri=source_repo_path,
258 )
229 )
259 repo = Repository.get_by_repo_name(repo_name_full)
230 repo = Repository.get_by_repo_name(repo_name_full)
260 log_create_repository(created_by=owner.username, **repo.get_dict())
231 log_create_repository(created_by=owner.username, **repo.get_dict())
261
232
262 # update repo commit caches initially
233 # update repo commit caches initially
263 config = repo._config
234 config = repo._config
264 config.set('extensions', 'largefiles', '')
235 config.set('extensions', 'largefiles', '')
265 repo.update_commit_cache(config=config)
236 repo.update_commit_cache(config=config)
266
237
267 # set new created state
238 # set new created state
268 repo.set_state(Repository.STATE_CREATED)
239 repo.set_state(Repository.STATE_CREATED)
269
240
270 repo_id = repo.repo_id
241 repo_id = repo.repo_id
271 repo_data = repo.get_api_data()
242 repo_data = repo.get_api_data()
272 audit_logger.store(
243 audit_logger.store(
273 'repo.fork', action_data={'data': repo_data},
244 'repo.fork', action_data={'data': repo_data},
274 user=cur_user,
245 user=cur_user,
275 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
246 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
276
247
277 DBS.commit()
248 Session().commit()
278 except Exception as e:
249 except Exception as e:
279 log.warning('Exception %s occurred when forking repository, '
250 log.warning('Exception %s occurred when forking repository, '
280 'doing cleanup...', e)
251 'doing cleanup...', e)
281 # rollback things manually !
252 # rollback things manually !
282 repo = Repository.get_by_repo_name(repo_name_full)
253 repo = Repository.get_by_repo_name(repo_name_full)
283 if repo:
254 if repo:
284 Repository.delete(repo.repo_id)
255 Repository.delete(repo.repo_id)
285 DBS.commit()
256 Session().commit()
286 RepoModel(DBS)._delete_filesystem_repo(repo)
257 RepoModel()._delete_filesystem_repo(repo)
287 raise
258 raise
288
259
289 # it's an odd fix to make celery fail the task when an exception occurs
260 # it's an odd fix to make celery fail the task when an exception occurs
290 def on_failure(self, *args, **kwargs):
261 def on_failure(self, *args, **kwargs):
291 pass
262 pass
292
263
293 return True
264 return True
265
266
267 @async_task(ignore_result=True)
268 def sync_repo(*args, **kwargs):
269 from rhodecode.model.scm import ScmModel
270 log = get_logger(sync_repo)
271
272 log.info('Pulling from %s', kwargs['repo_name'])
273 ScmModel().pull_changes(kwargs['repo_name'], kwargs['username'])
274
275
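A sketch of queuing the pull task, assuming @async_task exposes the regular Celery Task interface (repository and user names are placeholders):

    # queue a pull on a worker; kwargs match what the task reads above
    sync_repo.apply_async(kwargs={
        'repo_name': 'mygroup/myrepo',
        'username': 'admin',
    })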
@@ -1,49 +1,56 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 from pyramid.compat import configparser
22 from pyramid.compat import configparser
23 from pyramid.paster import bootstrap as pyramid_bootstrap, setup_logging # noqa
23 from pyramid.paster import bootstrap as pyramid_bootstrap, setup_logging # noqa
24 from pyramid.request import Request
24 from pyramid.request import Request
25 from pyramid.scripting import prepare
25
26
26
27
27 def get_config(ini_path, **kwargs):
28 def get_config(ini_path, **kwargs):
28 parser = configparser.ConfigParser(**kwargs)
29 parser = configparser.ConfigParser(**kwargs)
29 parser.read(ini_path)
30 parser.read(ini_path)
30 return parser
31 return parser
31
32
32
33
33 def get_app_config(ini_path):
34 def get_app_config(ini_path):
34 from paste.deploy.loadwsgi import appconfig
35 from paste.deploy.loadwsgi import appconfig
35 return appconfig('config:{}'.format(ini_path), relative_to=os.getcwd())
36 return appconfig('config:{}'.format(ini_path), relative_to=os.getcwd())
36
37
37
38
38 def bootstrap(config_uri, request=None, options=None):
39 def bootstrap(config_uri, request=None, options=None):
39
40
40 config = get_config(config_uri)
41 config = get_config(config_uri)
41 base_url = 'http://rhodecode.local'
42 base_url = 'http://rhodecode.local'
42 try:
43 try:
43 base_url = config.get('app:main', 'app.base_url')
44 base_url = config.get('app:main', 'app.base_url')
44 except (configparser.NoSectionError, configparser.NoOptionError):
45 except (configparser.NoSectionError, configparser.NoOptionError):
45 pass
46 pass
46
47
47 request = request or Request.blank('/', base_url=base_url)
48 request = request or Request.blank('/', base_url=base_url)
48
49
49 return pyramid_bootstrap(config_uri, request=request, options=options)
50 return pyramid_bootstrap(config_uri, request=request, options=options)
51
52
53 def prepare_request(environ):
54 request = Request.blank('/', environ=environ)
55 prepare(request) # set pyramid threadlocal request
56 return request
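A minimal sketch of how these helpers can be used from a standalone script (the ini path is a placeholder):

    # bootstrap a full pyramid environment from an ini file
    env = bootstrap('/path/to/rhodecode.ini')
    request = env['request']        # blank request bound to app.base_url
    registry = env['registry']
    env['closer']()                 # tear the environment down when done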
@@ -1,882 +1,802 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Utilities library for RhodeCode
22 Utilities library for RhodeCode
23 """
23 """
24
24
25 import datetime
25 import datetime
26 import decorator
26 import decorator
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30 import re
30 import re
31 import shutil
31 import shutil
32 import tempfile
32 import tempfile
33 import traceback
33 import traceback
34 import tarfile
34 import tarfile
35 import warnings
35 import warnings
36 import hashlib
36 import hashlib
37 from os.path import join as jn
37 from os.path import join as jn
38
38
39 import paste
39 import paste
40 import pkg_resources
40 import pkg_resources
41 from paste.script.command import Command, BadCommand
42 from webhelpers.text import collapse, remove_formatting, strip_tags
41 from webhelpers.text import collapse, remove_formatting, strip_tags
43 from mako import exceptions
42 from mako import exceptions
44 from pyramid.threadlocal import get_current_registry
43 from pyramid.threadlocal import get_current_registry
45 from pyramid.request import Request
44 from pyramid.request import Request
46
45
47 from rhodecode.lib.fakemod import create_module
46 from rhodecode.lib.fakemod import create_module
48 from rhodecode.lib.vcs.backends.base import Config
47 from rhodecode.lib.vcs.backends.base import Config
49 from rhodecode.lib.vcs.exceptions import VCSError
48 from rhodecode.lib.vcs.exceptions import VCSError
50 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
51 from rhodecode.lib.utils2 import (
50 from rhodecode.lib.utils2 import (
52 safe_str, safe_unicode, get_current_rhodecode_user, md5)
51 safe_str, safe_unicode, get_current_rhodecode_user, md5)
53 from rhodecode.model import meta
52 from rhodecode.model import meta
54 from rhodecode.model.db import (
53 from rhodecode.model.db import (
55 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
56 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
57
56
58
57
59 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
60
59
61 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
62
61
63 # String which contains characters that are not allowed in slug names for
62 # String which contains characters that are not allowed in slug names for
64 # repositories or repository groups. It is properly escaped to use it in
63 # repositories or repository groups. It is properly escaped to use it in
65 # regular expressions.
64 # regular expressions.
66 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
67
66
68 # Regex that matches forbidden characters in repo/group slugs.
67 # Regex that matches forbidden characters in repo/group slugs.
69 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
70
69
71 # Regex that matches allowed characters in repo/group slugs.
70 # Regex that matches allowed characters in repo/group slugs.
72 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
73
72
74 # Regex that matches whole repo/group slugs.
73 # Regex that matches whole repo/group slugs.
75 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
76
75
77 _license_cache = None
76 _license_cache = None
78
77
79
78
80 def repo_name_slug(value):
79 def repo_name_slug(value):
81 """
80 """
82 Return the slug of a repository name.
81 Return the slug of a repository name.
83 This function is called on each creation/modification
82 This function is called on each creation/modification
84 of a repository to prevent bad names in the repo name.
83 of a repository to prevent bad names in the repo name.
85 """
84 """
86 replacement_char = '-'
85 replacement_char = '-'
87
86
88 slug = remove_formatting(value)
87 slug = remove_formatting(value)
89 slug = SLUG_BAD_CHAR_RE.sub('', slug)
88 slug = SLUG_BAD_CHAR_RE.sub('', slug)
90 slug = re.sub('[\s]+', '-', slug)
89 slug = re.sub('[\s]+', '-', slug)
91 slug = collapse(slug, replacement_char)
90 slug = collapse(slug, replacement_char)
92 return slug
91 return slug
93
92
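An illustrative call; the exact result depends on webhelpers' collapse() behaviour, but forbidden characters are dropped and whitespace becomes '-':

    slug = repo_name_slug('My Repo! (v2)')
    # expected to yield roughly 'My-Repo-v2'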
94
93
95 #==============================================================================
94 #==============================================================================
96 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
97 #==============================================================================
96 #==============================================================================
98 def get_repo_slug(request):
97 def get_repo_slug(request):
99 _repo = ''
98 _repo = ''
100
99
101 if hasattr(request, 'db_repo'):
100 if hasattr(request, 'db_repo'):
102 # if our requests has set db reference use it for name, this
101 # if our requests has set db reference use it for name, this
103 # translates the example.com/_<id> into proper repo names
102 # translates the example.com/_<id> into proper repo names
104 _repo = request.db_repo.repo_name
103 _repo = request.db_repo.repo_name
105 elif getattr(request, 'matchdict', None):
104 elif getattr(request, 'matchdict', None):
106 # pyramid
105 # pyramid
107 _repo = request.matchdict.get('repo_name')
106 _repo = request.matchdict.get('repo_name')
108
107
109 if _repo:
108 if _repo:
110 _repo = _repo.rstrip('/')
109 _repo = _repo.rstrip('/')
111 return _repo
110 return _repo
112
111
113
112
114 def get_repo_group_slug(request):
113 def get_repo_group_slug(request):
115 _group = ''
114 _group = ''
116 if hasattr(request, 'db_repo_group'):
115 if hasattr(request, 'db_repo_group'):
117 # if our request has a db reference set, use it for the name; this
116 # if our request has a db reference set, use it for the name; this
118 # translates the example.com/_<id> into proper repo group names
117 # translates the example.com/_<id> into proper repo group names
119 _group = request.db_repo_group.group_name
118 _group = request.db_repo_group.group_name
120 elif getattr(request, 'matchdict', None):
119 elif getattr(request, 'matchdict', None):
121 # pyramid
120 # pyramid
122 _group = request.matchdict.get('repo_group_name')
121 _group = request.matchdict.get('repo_group_name')
123
122
124
123
125 if _group:
124 if _group:
126 _group = _group.rstrip('/')
125 _group = _group.rstrip('/')
127 return _group
126 return _group
128
127
129
128
130 def get_user_group_slug(request):
129 def get_user_group_slug(request):
131 _user_group = ''
130 _user_group = ''
132
131
133 if hasattr(request, 'db_user_group'):
132 if hasattr(request, 'db_user_group'):
134 _user_group = request.db_user_group.users_group_name
133 _user_group = request.db_user_group.users_group_name
135 elif getattr(request, 'matchdict', None):
134 elif getattr(request, 'matchdict', None):
136 # pyramid
135 # pyramid
137 _user_group = request.matchdict.get('user_group_id')
136 _user_group = request.matchdict.get('user_group_id')
138
137
139 try:
138 try:
140 _user_group = UserGroup.get(_user_group)
139 _user_group = UserGroup.get(_user_group)
141 if _user_group:
140 if _user_group:
142 _user_group = _user_group.users_group_name
141 _user_group = _user_group.users_group_name
143 except Exception:
142 except Exception:
144 log.exception('Failed to get user group by id')
143 log.exception('Failed to get user group by id')
145 # catch all failures here
144 # catch all failures here
146 return None
145 return None
147
146
148 return _user_group
147 return _user_group
149
148
150
149
151 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
150 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
152 """
151 """
153 Scans the given path for repos and returns (name, (type, path)) tuples
152 Scans the given path for repos and returns (name, (type, path)) tuples
154
153
155 :param path: path to scan for repositories
154 :param path: path to scan for repositories
156 :param recursive: recursive search and return names with subdirs in front
155 :param recursive: recursive search and return names with subdirs in front
157 """
156 """
158
157
159 # remove ending slash for better results
158 # remove ending slash for better results
160 path = path.rstrip(os.sep)
159 path = path.rstrip(os.sep)
161 log.debug('now scanning in %s location recursive:%s...', path, recursive)
160 log.debug('now scanning in %s location recursive:%s...', path, recursive)
162
161
163 def _get_repos(p):
162 def _get_repos(p):
164 dirpaths = _get_dirpaths(p)
163 dirpaths = _get_dirpaths(p)
165 if not _is_dir_writable(p):
164 if not _is_dir_writable(p):
166 log.warning('repo path without write access: %s', p)
165 log.warning('repo path without write access: %s', p)
167
166
168 for dirpath in dirpaths:
167 for dirpath in dirpaths:
169 if os.path.isfile(os.path.join(p, dirpath)):
168 if os.path.isfile(os.path.join(p, dirpath)):
170 continue
169 continue
171 cur_path = os.path.join(p, dirpath)
170 cur_path = os.path.join(p, dirpath)
172
171
173 # skip removed repos
172 # skip removed repos
174 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
173 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
175 continue
174 continue
176
175
177 # skip .<something> dirs
176 # skip .<something> dirs
178 if dirpath.startswith('.'):
177 if dirpath.startswith('.'):
179 continue
178 continue
180
179
181 try:
180 try:
182 scm_info = get_scm(cur_path)
181 scm_info = get_scm(cur_path)
183 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
182 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
184 except VCSError:
183 except VCSError:
185 if not recursive:
184 if not recursive:
186 continue
185 continue
187 # check if this dir contains other repos for recursive scan
186 # check if this dir contains other repos for recursive scan
188 rec_path = os.path.join(p, dirpath)
187 rec_path = os.path.join(p, dirpath)
189 if os.path.isdir(rec_path):
188 if os.path.isdir(rec_path):
190 for inner_scm in _get_repos(rec_path):
189 for inner_scm in _get_repos(rec_path):
191 yield inner_scm
190 yield inner_scm
192
191
193 return _get_repos(path)
192 return _get_repos(path)
194
193
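A sketch of consuming the generator (the storage path is a placeholder); each item unpacks as described in the docstring:

    # list every repository found under the storage path
    for repo_name, (scm_type, repo_path) in get_filesystem_repos('/srv/repos', recursive=True):
        print(repo_name, scm_type)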
195
194
196 def _get_dirpaths(p):
195 def _get_dirpaths(p):
197 try:
196 try:
198 # OS-independent way of checking if we have at least read-only
197 # OS-independent way of checking if we have at least read-only
199 # access or not.
198 # access or not.
200 dirpaths = os.listdir(p)
199 dirpaths = os.listdir(p)
201 except OSError:
200 except OSError:
202 log.warning('ignoring repo path without read access: %s', p)
201 log.warning('ignoring repo path without read access: %s', p)
203 return []
202 return []
204
203
205 # os.listdir has a tweak: if a unicode path is passed into it, then it tries to
204 # os.listdir has a tweak: if a unicode path is passed into it, then it tries to
206 # decode paths and suddenly returns unicode objects itself. The items it
205 # decode paths and suddenly returns unicode objects itself. The items it
207 # cannot decode are returned as strings and cause issues.
206 # cannot decode are returned as strings and cause issues.
208 #
207 #
209 # Those paths are ignored here until a solid solution for path handling has
208 # Those paths are ignored here until a solid solution for path handling has
210 # been built.
209 # been built.
211 expected_type = type(p)
210 expected_type = type(p)
212
211
213 def _has_correct_type(item):
212 def _has_correct_type(item):
214 if type(item) is not expected_type:
213 if type(item) is not expected_type:
215 log.error(
214 log.error(
216 u"Ignoring path %s since it cannot be decoded into unicode.",
215 u"Ignoring path %s since it cannot be decoded into unicode.",
217 # Using "repr" to make sure that we see the byte value in case
216 # Using "repr" to make sure that we see the byte value in case
218 # it could not be decoded.
217 # it could not be decoded.
219 repr(item))
218 repr(item))
220 return False
219 return False
221 return True
220 return True
222
221
223 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
222 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
224
223
225 return dirpaths
224 return dirpaths
226
225
227
226
228 def _is_dir_writable(path):
227 def _is_dir_writable(path):
229 """
228 """
230 Probe if `path` is writable.
229 Probe if `path` is writable.
231
230
232 Due to trouble on Cygwin / Windows, this is actually probing if it is
231 Due to trouble on Cygwin / Windows, this is actually probing if it is
233 possible to create a file inside of `path`, stat does not produce reliable
232 possible to create a file inside of `path`, stat does not produce reliable
234 results in this case.
233 results in this case.
235 """
234 """
236 try:
235 try:
237 with tempfile.TemporaryFile(dir=path):
236 with tempfile.TemporaryFile(dir=path):
238 pass
237 pass
239 except OSError:
238 except OSError:
240 return False
239 return False
241 return True
240 return True
242
241
243
242
244 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None):
243 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None):
245 """
244 """
246 Returns True if the given path is a valid repository, False otherwise.
245 Returns True if the given path is a valid repository, False otherwise.
247 If the expect_scm param is also given, compare whether the detected scm
246 If the expect_scm param is also given, compare whether the detected scm
248 matches it. If explicit_scm is given, don't try to detect the scm;
247 matches it. If explicit_scm is given, don't try to detect the scm;
249 just use the given one to check if the repo is valid.
248 just use the given one to check if the repo is valid.
250
249
251 :param repo_name:
250 :param repo_name:
252 :param base_path:
251 :param base_path:
253 :param expect_scm:
252 :param expect_scm:
254 :param explicit_scm:
253 :param explicit_scm:
255
254
256 :return True: if given path is a valid repository
255 :return True: if given path is a valid repository
257 """
256 """
258 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
257 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
259 log.debug('Checking if `%s` is a valid path for repository. '
258 log.debug('Checking if `%s` is a valid path for repository. '
260 'Explicit type: %s', repo_name, explicit_scm)
259 'Explicit type: %s', repo_name, explicit_scm)
261
260
262 try:
261 try:
263 if explicit_scm:
262 if explicit_scm:
264 detected_scms = [get_scm_backend(explicit_scm)]
263 detected_scms = [get_scm_backend(explicit_scm)]
265 else:
264 else:
266 detected_scms = get_scm(full_path)
265 detected_scms = get_scm(full_path)
267
266
268 if expect_scm:
267 if expect_scm:
269 return detected_scms[0] == expect_scm
268 return detected_scms[0] == expect_scm
270 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
269 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
271 return True
270 return True
272 except VCSError:
271 except VCSError:
273 log.debug('path: %s is not a valid repo !', full_path)
272 log.debug('path: %s is not a valid repo !', full_path)
274 return False
273 return False
275
274
276
275
277 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
276 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
278 """
277 """
279 Returns True if given path is a repository group, False otherwise
278 Returns True if given path is a repository group, False otherwise
280
279
281 :param repo_group_name:
280 :param repo_group_name:
282 :param base_path:
281 :param base_path:
283 """
282 """
284 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
283 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
285 log.debug('Checking if `%s` is a valid path for repository group',
284 log.debug('Checking if `%s` is a valid path for repository group',
286 repo_group_name)
285 repo_group_name)
287
286
288 # check if it's not a repo
287 # check if it's not a repo
289 if is_valid_repo(repo_group_name, base_path):
288 if is_valid_repo(repo_group_name, base_path):
290 log.debug('Repo called %s exists, it is not a valid '
289 log.debug('Repo called %s exists, it is not a valid '
291 'repo group' % repo_group_name)
290 'repo group' % repo_group_name)
292 return False
291 return False
293
292
294 try:
293 try:
295 # we need to check bare git repos at higher level
294 # we need to check bare git repos at higher level
296 # since we might match branches/hooks/info/objects or possible
295 # since we might match branches/hooks/info/objects or possible
297 # other things inside bare git repo
296 # other things inside bare git repo
298 scm_ = get_scm(os.path.dirname(full_path))
297 scm_ = get_scm(os.path.dirname(full_path))
299 log.debug('path: %s is a vcs object:%s, not a valid '
298 log.debug('path: %s is a vcs object:%s, not a valid '
300 'repo group' % (full_path, scm_))
299 'repo group' % (full_path, scm_))
301 return False
300 return False
302 except VCSError:
301 except VCSError:
303 pass
302 pass
304
303
305 # check if it's a valid path
304 # check if it's a valid path
306 if skip_path_check or os.path.isdir(full_path):
305 if skip_path_check or os.path.isdir(full_path):
307 log.debug('path: %s is a valid repo group !', full_path)
306 log.debug('path: %s is a valid repo group !', full_path)
308 return True
307 return True
309
308
310 log.debug('path: %s is not a valid repo group !', full_path)
309 log.debug('path: %s is not a valid repo group !', full_path)
311 return False
310 return False
312
311
313
312
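Illustrative checks against a repository store (paths and names are placeholders):

    base_path = '/srv/repos'
    # detect the scm automatically, or pin it via expect_scm / explicit_scm
    if is_valid_repo('mygroup/myrepo', base_path, expect_scm='git'):
        pass  # safe to load as a git repository
    if is_valid_repo_group('mygroup', base_path):
        pass  # path is a repository group, not a repository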
314 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
313 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
315 while True:
314 while True:
316 ok = raw_input(prompt)
315 ok = raw_input(prompt)
317 if ok.lower() in ('y', 'ye', 'yes'):
316 if ok.lower() in ('y', 'ye', 'yes'):
318 return True
317 return True
319 if ok.lower() in ('n', 'no', 'nop', 'nope'):
318 if ok.lower() in ('n', 'no', 'nop', 'nope'):
320 return False
319 return False
321 retries = retries - 1
320 retries = retries - 1
322 if retries < 0:
321 if retries < 0:
323 raise IOError
322 raise IOError
324 print(complaint)
323 print(complaint)
325
324
326 # propagated from mercurial documentation
325 # propagated from mercurial documentation
327 ui_sections = [
326 ui_sections = [
328 'alias', 'auth',
327 'alias', 'auth',
329 'decode/encode', 'defaults',
328 'decode/encode', 'defaults',
330 'diff', 'email',
329 'diff', 'email',
331 'extensions', 'format',
330 'extensions', 'format',
332 'merge-patterns', 'merge-tools',
331 'merge-patterns', 'merge-tools',
333 'hooks', 'http_proxy',
332 'hooks', 'http_proxy',
334 'smtp', 'patch',
333 'smtp', 'patch',
335 'paths', 'profiling',
334 'paths', 'profiling',
336 'server', 'trusted',
335 'server', 'trusted',
337 'ui', 'web', ]
336 'ui', 'web', ]
338
337
339
338
340 def config_data_from_db(clear_session=True, repo=None):
339 def config_data_from_db(clear_session=True, repo=None):
341 """
340 """
342 Read the configuration data from the database and return configuration
341 Read the configuration data from the database and return configuration
343 tuples.
342 tuples.
344 """
343 """
345 from rhodecode.model.settings import VcsSettingsModel
344 from rhodecode.model.settings import VcsSettingsModel
346
345
347 config = []
346 config = []
348
347
349 sa = meta.Session()
348 sa = meta.Session()
350 settings_model = VcsSettingsModel(repo=repo, sa=sa)
349 settings_model = VcsSettingsModel(repo=repo, sa=sa)
351
350
352 ui_settings = settings_model.get_ui_settings()
351 ui_settings = settings_model.get_ui_settings()
353
352
354 for setting in ui_settings:
353 for setting in ui_settings:
355 if setting.active:
354 if setting.active:
356 log.debug(
355 log.debug(
357 'settings ui from db: [%s] %s=%s',
356 'settings ui from db: [%s] %s=%s',
358 setting.section, setting.key, setting.value)
357 setting.section, setting.key, setting.value)
359 config.append((
358 config.append((
360 safe_str(setting.section), safe_str(setting.key),
359 safe_str(setting.section), safe_str(setting.key),
361 safe_str(setting.value)))
360 safe_str(setting.value)))
362 if setting.key == 'push_ssl':
361 if setting.key == 'push_ssl':
363 # force set push_ssl requirement to False, rhodecode
362 # force set push_ssl requirement to False, rhodecode
364 # handles that
363 # handles that
365 config.append((
364 config.append((
366 safe_str(setting.section), safe_str(setting.key), False))
365 safe_str(setting.section), safe_str(setting.key), False))
367 if clear_session:
366 if clear_session:
368 meta.Session.remove()
367 meta.Session.remove()
369
368
370 # TODO: mikhail: probably it makes no sense to re-read hooks information.
369 # TODO: mikhail: probably it makes no sense to re-read hooks information.
371 # It's already there and activated/deactivated
370 # It's already there and activated/deactivated
372 skip_entries = []
371 skip_entries = []
373 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
372 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
374 if 'pull' not in enabled_hook_classes:
373 if 'pull' not in enabled_hook_classes:
375 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
374 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
376 if 'push' not in enabled_hook_classes:
375 if 'push' not in enabled_hook_classes:
377 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
376 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
378 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
377 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
379 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
378 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
380
379
381 config = [entry for entry in config if entry[:2] not in skip_entries]
380 config = [entry for entry in config if entry[:2] not in skip_entries]
382
381
383 return config
382 return config
384
383
385
384
386 def make_db_config(clear_session=True, repo=None):
385 def make_db_config(clear_session=True, repo=None):
387 """
386 """
388 Create a :class:`Config` instance based on the values in the database.
387 Create a :class:`Config` instance based on the values in the database.
389 """
388 """
390 config = Config()
389 config = Config()
391 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
390 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
392 for section, option, value in config_data:
391 for section, option, value in config_data:
393 config.set(section, option, value)
392 config.set(section, option, value)
394 return config
393 return config
395
394
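A sketch of the typical flow, mirroring how the config object is consumed elsewhere in this changeset (the repo name is a placeholder):

    # build a vcs Config from the ui settings stored in the database
    db_repo = Repository.get_by_repo_name('mygroup/myrepo')
    vcs_config = make_db_config(clear_session=False, repo=db_repo.repo_name)
    scm = db_repo.scm_instance(config=vcs_config)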
396
395
397 def get_enabled_hook_classes(ui_settings):
396 def get_enabled_hook_classes(ui_settings):
398 """
397 """
399 Return the enabled hook classes.
398 Return the enabled hook classes.
400
399
401 :param ui_settings: List of ui_settings as returned
400 :param ui_settings: List of ui_settings as returned
402 by :meth:`VcsSettingsModel.get_ui_settings`
401 by :meth:`VcsSettingsModel.get_ui_settings`
403
402
404 :return: a list with the enabled hook classes. The order is not guaranteed.
403 :return: a list with the enabled hook classes. The order is not guaranteed.
405 :rtype: list
404 :rtype: list
406 """
405 """
407 enabled_hooks = []
406 enabled_hooks = []
408 active_hook_keys = [
407 active_hook_keys = [
409 key for section, key, value, active in ui_settings
408 key for section, key, value, active in ui_settings
410 if section == 'hooks' and active]
409 if section == 'hooks' and active]
411
410
412 hook_names = {
411 hook_names = {
413 RhodeCodeUi.HOOK_PUSH: 'push',
412 RhodeCodeUi.HOOK_PUSH: 'push',
414 RhodeCodeUi.HOOK_PULL: 'pull',
413 RhodeCodeUi.HOOK_PULL: 'pull',
415 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
414 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
416 }
415 }
417
416
418 for key in active_hook_keys:
417 for key in active_hook_keys:
419 hook = hook_names.get(key)
418 hook = hook_names.get(key)
420 if hook:
419 if hook:
421 enabled_hooks.append(hook)
420 enabled_hooks.append(hook)
422
421
423 return enabled_hooks
422 return enabled_hooks
424
423
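A minimal illustration using hand-built (section, key, value, active) tuples in place of real ui_settings objects:

    ui_settings = [
        ('hooks', RhodeCodeUi.HOOK_PUSH, '', True),
        ('hooks', RhodeCodeUi.HOOK_PULL, '', False),
        ('hooks', RhodeCodeUi.HOOK_REPO_SIZE, '', True),
    ]
    get_enabled_hook_classes(ui_settings)   # -> ['push', 'repo_size'] (order not guaranteed)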
425
424
426 def set_rhodecode_config(config):
425 def set_rhodecode_config(config):
427 """
426 """
428 Updates pyramid config with new settings from database
427 Updates pyramid config with new settings from database
429
428
430 :param config:
429 :param config:
431 """
430 """
432 from rhodecode.model.settings import SettingsModel
431 from rhodecode.model.settings import SettingsModel
433 app_settings = SettingsModel().get_all_settings()
432 app_settings = SettingsModel().get_all_settings()
434
433
435 for k, v in app_settings.items():
434 for k, v in app_settings.items():
436 config[k] = v
435 config[k] = v
437
436
438
437
439 def get_rhodecode_realm():
438 def get_rhodecode_realm():
440 """
439 """
441 Return the rhodecode realm from database.
440 Return the rhodecode realm from database.
442 """
441 """
443 from rhodecode.model.settings import SettingsModel
442 from rhodecode.model.settings import SettingsModel
444 realm = SettingsModel().get_setting_by_name('realm')
443 realm = SettingsModel().get_setting_by_name('realm')
445 return safe_str(realm.app_settings_value)
444 return safe_str(realm.app_settings_value)
446
445
447
446
448 def get_rhodecode_base_path():
447 def get_rhodecode_base_path():
449 """
448 """
450 Returns the base path. The base path is the filesystem path which points
449 Returns the base path. The base path is the filesystem path which points
451 to the repository store.
450 to the repository store.
452 """
451 """
453 from rhodecode.model.settings import SettingsModel
452 from rhodecode.model.settings import SettingsModel
454 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
453 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
455 return safe_str(paths_ui.ui_value)
454 return safe_str(paths_ui.ui_value)
456
455
457
456
458 def map_groups(path):
457 def map_groups(path):
459 """
458 """
460 Given a full path to a repository, create all nested groups that this
459 Given a full path to a repository, create all nested groups that this
461 repo is inside. This function creates parent-child relationships between
460 repo is inside. This function creates parent-child relationships between
462 groups and creates default perms for all new groups.
461 groups and creates default perms for all new groups.
463
462
464 :param path: full path to repository
463 :param path: full path to repository
465 """
464 """
466 from rhodecode.model.repo_group import RepoGroupModel
465 from rhodecode.model.repo_group import RepoGroupModel
467 sa = meta.Session()
466 sa = meta.Session()
468 groups = path.split(Repository.NAME_SEP)
467 groups = path.split(Repository.NAME_SEP)
469 parent = None
468 parent = None
470 group = None
469 group = None
471
470
472 # last element is repo in nested groups structure
471 # last element is repo in nested groups structure
473 groups = groups[:-1]
472 groups = groups[:-1]
474 rgm = RepoGroupModel(sa)
473 rgm = RepoGroupModel(sa)
475 owner = User.get_first_super_admin()
474 owner = User.get_first_super_admin()
476 for lvl, group_name in enumerate(groups):
475 for lvl, group_name in enumerate(groups):
477 group_name = '/'.join(groups[:lvl] + [group_name])
476 group_name = '/'.join(groups[:lvl] + [group_name])
478 group = RepoGroup.get_by_group_name(group_name)
477 group = RepoGroup.get_by_group_name(group_name)
479 desc = '%s group' % group_name
478 desc = '%s group' % group_name
480
479
481 # skip folders that are now removed repos
480 # skip folders that are now removed repos
482 if REMOVED_REPO_PAT.match(group_name):
481 if REMOVED_REPO_PAT.match(group_name):
483 break
482 break
484
483
485 if group is None:
484 if group is None:
486 log.debug('creating group level: %s group_name: %s',
485 log.debug('creating group level: %s group_name: %s',
487 lvl, group_name)
486 lvl, group_name)
488 group = RepoGroup(group_name, parent)
487 group = RepoGroup(group_name, parent)
489 group.group_description = desc
488 group.group_description = desc
490 group.user = owner
489 group.user = owner
491 sa.add(group)
490 sa.add(group)
492 perm_obj = rgm._create_default_perms(group)
491 perm_obj = rgm._create_default_perms(group)
493 sa.add(perm_obj)
492 sa.add(perm_obj)
494 sa.flush()
493 sa.flush()
495
494
496 parent = group
495 parent = group
497 return group
496 return group
498
497
499
498
500 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
499 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
501 """
500 """
502 maps all repos given in initial_repo_list; non-existing repositories
501 maps all repos given in initial_repo_list; non-existing repositories
503 are created. If remove_obsolete is True it also checks for db entries
502 are created. If remove_obsolete is True it also checks for db entries
504 that are not in initial_repo_list and removes them.
503 that are not in initial_repo_list and removes them.
505
504
506 :param initial_repo_list: list of repositories found by scanning methods
505 :param initial_repo_list: list of repositories found by scanning methods
507 :param remove_obsolete: check for obsolete entries in database
506 :param remove_obsolete: check for obsolete entries in database
508 """
507 """
509 from rhodecode.model.repo import RepoModel
508 from rhodecode.model.repo import RepoModel
510 from rhodecode.model.scm import ScmModel
509 from rhodecode.model.scm import ScmModel
511 from rhodecode.model.repo_group import RepoGroupModel
510 from rhodecode.model.repo_group import RepoGroupModel
512 from rhodecode.model.settings import SettingsModel
511 from rhodecode.model.settings import SettingsModel
513
512
514 sa = meta.Session()
513 sa = meta.Session()
515 repo_model = RepoModel()
514 repo_model = RepoModel()
516 user = User.get_first_super_admin()
515 user = User.get_first_super_admin()
517 added = []
516 added = []
518
517
519 # creation defaults
518 # creation defaults
520 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
519 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
521 enable_statistics = defs.get('repo_enable_statistics')
520 enable_statistics = defs.get('repo_enable_statistics')
522 enable_locking = defs.get('repo_enable_locking')
521 enable_locking = defs.get('repo_enable_locking')
523 enable_downloads = defs.get('repo_enable_downloads')
522 enable_downloads = defs.get('repo_enable_downloads')
524 private = defs.get('repo_private')
523 private = defs.get('repo_private')
525
524
526 for name, repo in initial_repo_list.items():
525 for name, repo in initial_repo_list.items():
527 group = map_groups(name)
526 group = map_groups(name)
528 unicode_name = safe_unicode(name)
527 unicode_name = safe_unicode(name)
529 db_repo = repo_model.get_by_repo_name(unicode_name)
528 db_repo = repo_model.get_by_repo_name(unicode_name)
530 # found repo that is on filesystem not in RhodeCode database
529 # found repo that is on filesystem not in RhodeCode database
531 if not db_repo:
530 if not db_repo:
532 log.info('repository %s not found, creating now', name)
531 log.info('repository %s not found, creating now', name)
533 added.append(name)
532 added.append(name)
534 desc = (repo.description
533 desc = (repo.description
535 if repo.description != 'unknown'
534 if repo.description != 'unknown'
536 else '%s repository' % name)
535 else '%s repository' % name)
537
536
538 db_repo = repo_model._create_repo(
537 db_repo = repo_model._create_repo(
539 repo_name=name,
538 repo_name=name,
540 repo_type=repo.alias,
539 repo_type=repo.alias,
541 description=desc,
540 description=desc,
542 repo_group=getattr(group, 'group_id', None),
541 repo_group=getattr(group, 'group_id', None),
543 owner=user,
542 owner=user,
544 enable_locking=enable_locking,
543 enable_locking=enable_locking,
545 enable_downloads=enable_downloads,
544 enable_downloads=enable_downloads,
546 enable_statistics=enable_statistics,
545 enable_statistics=enable_statistics,
547 private=private,
546 private=private,
548 state=Repository.STATE_CREATED
547 state=Repository.STATE_CREATED
549 )
548 )
550 sa.commit()
549 sa.commit()
551 # we added that repo just now, so make sure we update the server info
550 # we added that repo just now, so make sure we update the server info
552 if db_repo.repo_type == 'git':
551 if db_repo.repo_type == 'git':
553 git_repo = db_repo.scm_instance()
552 git_repo = db_repo.scm_instance()
554 # update repository server-info
553 # update repository server-info
555 log.debug('Running update server info')
554 log.debug('Running update server info')
556 git_repo._update_server_info()
555 git_repo._update_server_info()
557
556
558 db_repo.update_commit_cache()
557 db_repo.update_commit_cache()
559
558
560 config = db_repo._config
559 config = db_repo._config
561 config.set('extensions', 'largefiles', '')
560 config.set('extensions', 'largefiles', '')
562 ScmModel().install_hooks(
561 ScmModel().install_hooks(
563 db_repo.scm_instance(config=config),
562 db_repo.scm_instance(config=config),
564 repo_type=db_repo.repo_type)
563 repo_type=db_repo.repo_type)
565
564
566 removed = []
565 removed = []
567 if remove_obsolete:
566 if remove_obsolete:
568 # remove from database those repositories that are not in the filesystem
567 # remove from database those repositories that are not in the filesystem
569 for repo in sa.query(Repository).all():
568 for repo in sa.query(Repository).all():
570 if repo.repo_name not in initial_repo_list.keys():
569 if repo.repo_name not in initial_repo_list.keys():
571 log.debug("Removing non-existing repository found in db `%s`",
570 log.debug("Removing non-existing repository found in db `%s`",
572 repo.repo_name)
571 repo.repo_name)
573 try:
572 try:
574 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
573 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
575 sa.commit()
574 sa.commit()
576 removed.append(repo.repo_name)
575 removed.append(repo.repo_name)
577 except Exception:
576 except Exception:
578 # don't hold further removals on error
577 # don't hold further removals on error
579 log.error(traceback.format_exc())
578 log.error(traceback.format_exc())
580 sa.rollback()
579 sa.rollback()
581
580
582 def splitter(full_repo_name):
581 def splitter(full_repo_name):
583 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
582 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
584 gr_name = None
583 gr_name = None
585 if len(_parts) == 2:
584 if len(_parts) == 2:
586 gr_name = _parts[0]
585 gr_name = _parts[0]
587 return gr_name
586 return gr_name
588
587
589 initial_repo_group_list = [splitter(x) for x in
588 initial_repo_group_list = [splitter(x) for x in
590 initial_repo_list.keys() if splitter(x)]
589 initial_repo_list.keys() if splitter(x)]
591
590
592 # remove from database those repository groups that are not on the
591 # remove from database those repository groups that are not on the
593 # filesystem; due to parent-child relationships we need to delete them
592 # filesystem; due to parent-child relationships we need to delete them
594 # in a specific order, most nested first
593 # in a specific order, most nested first
595 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
594 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
596 nested_sort = lambda gr: len(gr.split('/'))
595 nested_sort = lambda gr: len(gr.split('/'))
597 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
596 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
598 if group_name not in initial_repo_group_list:
597 if group_name not in initial_repo_group_list:
599 repo_group = RepoGroup.get_by_group_name(group_name)
598 repo_group = RepoGroup.get_by_group_name(group_name)
600 if (repo_group.children.all() or
599 if (repo_group.children.all() or
601 not RepoGroupModel().check_exist_filesystem(
600 not RepoGroupModel().check_exist_filesystem(
602 group_name=group_name, exc_on_failure=False)):
601 group_name=group_name, exc_on_failure=False)):
603 continue
602 continue
604
603
605 log.info(
604 log.info(
606 'Removing non-existing repository group found in db `%s`',
605 'Removing non-existing repository group found in db `%s`',
607 group_name)
606 group_name)
608 try:
607 try:
609 RepoGroupModel(sa).delete(group_name, fs_remove=False)
608 RepoGroupModel(sa).delete(group_name, fs_remove=False)
610 sa.commit()
609 sa.commit()
611 removed.append(group_name)
610 removed.append(group_name)
612 except Exception:
611 except Exception:
613 # don't hold further removals on error
612 # don't hold further removals on error
614 log.exception(
613 log.exception(
615 'Unable to remove repository group `%s`',
614 'Unable to remove repository group `%s`',
616 group_name)
615 group_name)
617 sa.rollback()
616 sa.rollback()
618 raise
617 raise
619
618
620 return added, removed
619 return added, removed
621
620
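A sketch of the rescan flow, assuming ScmModel.repo_scan() returns a mapping of repo name to vcs backend instance as the mapper expects:

    from rhodecode.model.scm import ScmModel
    # scan the storage path and reconcile the result with the database
    scanned = ScmModel().repo_scan(get_rhodecode_base_path())
    added, removed = repo2db_mapper(scanned, remove_obsolete=False)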
622
621
623 def get_default_cache_settings(settings):
622 def get_default_cache_settings(settings):
624 cache_settings = {}
623 cache_settings = {}
625 for key in settings.keys():
624 for key in settings.keys():
626 for prefix in ['beaker.cache.', 'cache.']:
625 for prefix in ['beaker.cache.', 'cache.']:
627 if key.startswith(prefix):
626 if key.startswith(prefix):
628 name = key.split(prefix)[1].strip()
627 name = key.split(prefix)[1].strip()
629 cache_settings[name] = settings[key].strip()
628 cache_settings[name] = settings[key].strip()
630 return cache_settings
629 return cache_settings
631
630
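An illustration of the prefix stripping on a typical settings dict (keys and values are placeholders):

    settings = {
        'beaker.cache.regions': 'long_term, sql_cache_short',
        'beaker.cache.long_term.expire': '36000',
        'cache.sql_cache_short.expire': '30',
        'unrelated.key': 'ignored',
    }
    get_default_cache_settings(settings)
    # -> {'regions': 'long_term, sql_cache_short',
    #     'long_term.expire': '36000',
    #     'sql_cache_short.expire': '30'}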
632
631
633 # set cache regions for beaker so celery can utilise it
632 # set cache regions for beaker so celery can utilise it
634 def add_cache(settings):
633 def add_cache(settings):
635 from rhodecode.lib import caches
634 from rhodecode.lib import caches
636 cache_settings = {'regions': None}
635 cache_settings = {'regions': None}
637 # main cache settings used as default ...
636 # main cache settings used as default ...
638 cache_settings.update(get_default_cache_settings(settings))
637 cache_settings.update(get_default_cache_settings(settings))
639
638
640 if cache_settings['regions']:
639 if cache_settings['regions']:
641 for region in cache_settings['regions'].split(','):
640 for region in cache_settings['regions'].split(','):
642 region = region.strip()
641 region = region.strip()
643 region_settings = {}
642 region_settings = {}
644 for key, value in cache_settings.items():
643 for key, value in cache_settings.items():
645 if key.startswith(region):
644 if key.startswith(region):
646 region_settings[key.split('.')[1]] = value
645 region_settings[key.split('.')[1]] = value
647
646
648 caches.configure_cache_region(
647 caches.configure_cache_region(
649 region, region_settings, cache_settings)
648 region, region_settings, cache_settings)
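# Illustrative sketch (not part of the original changeset): add_cache expects
# beaker-style keys in the ini settings; the region names and values below are
# assumptions, not the project's shipped defaults. With settings such as
#   beaker.cache.regions = short_term, long_term
#   beaker.cache.short_term.type = memory
#   beaker.cache.short_term.expire = 60
# get_default_cache_settings() yields {'regions': 'short_term, long_term',
# 'short_term.type': 'memory', 'short_term.expire': '60', ...} and each region is
# then passed to caches.configure_cache_region() with
# region_settings == {'type': 'memory', 'expire': '60'}.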
650
649
651
650
652 def load_rcextensions(root_path):
651 def load_rcextensions(root_path):
653 import rhodecode
652 import rhodecode
654 from rhodecode.config import conf
653 from rhodecode.config import conf
655
654
656 path = os.path.join(root_path, 'rcextensions', '__init__.py')
655 path = os.path.join(root_path, 'rcextensions', '__init__.py')
657 if os.path.isfile(path):
656 if os.path.isfile(path):
658 rcext = create_module('rc', path)
657 rcext = create_module('rc', path)
659 EXT = rhodecode.EXTENSIONS = rcext
658 EXT = rhodecode.EXTENSIONS = rcext
660 log.debug('Found rcextensions now loading %s...', rcext)
659 log.debug('Found rcextensions now loading %s...', rcext)
661
660
662 # Additional mappings that are not present in the pygments lexers
661 # Additional mappings that are not present in the pygments lexers
663 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
662 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
664
663
665 # auto-check if the module is not missing any data; set to defaults if it is,
664 # auto-check if the module is not missing any data; set to defaults if it is,
666 # this will help auto-update new features of the rcext module
665 # this will help auto-update new features of the rcext module
667 #from rhodecode.config import rcextensions
666 #from rhodecode.config import rcextensions
668 #for k in dir(rcextensions):
667 #for k in dir(rcextensions):
669 # if not k.startswith('_') and not hasattr(EXT, k):
668 # if not k.startswith('_') and not hasattr(EXT, k):
670 # setattr(EXT, k, getattr(rcextensions, k))
669 # setattr(EXT, k, getattr(rcextensions, k))
671
670
672
671
673 def get_custom_lexer(extension):
672 def get_custom_lexer(extension):
674 """
673 """
675 returns a custom lexer if it is defined in rcextensions module, or None
674 returns a custom lexer if it is defined in rcextensions module, or None
676 if there's no custom lexer defined
675 if there's no custom lexer defined
677 """
676 """
678 import rhodecode
677 import rhodecode
679 from pygments import lexers
678 from pygments import lexers
680
679
681 # custom override made by RhodeCode
680 # custom override made by RhodeCode
682 if extension in ['mako']:
681 if extension in ['mako']:
683 return lexers.get_lexer_by_name('html+mako')
682 return lexers.get_lexer_by_name('html+mako')
684
683
685 # check if we didn't define this extension as another lexer
684 # check if we didn't define this extension as another lexer
686 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
685 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
687 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
686 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
688 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
687 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
689 return lexers.get_lexer_by_name(_lexer_name)
688 return lexers.get_lexer_by_name(_lexer_name)
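# Illustrative sketch (not part of the original changeset): 'mako' is always
# mapped to the pygments 'html+mako' lexer; any other extension is looked up in
# the optional rcextensions EXTRA_LEXERS mapping (the 'tt' entry below is a
# made-up example, not a shipped default).
# >>> get_custom_lexer('mako')             # -> pygments 'html+mako' lexer
# >>> # with EXTRA_LEXERS = {'tt': 'text'} defined in rcextensions:
# >>> # get_custom_lexer('tt')             # -> pygments 'text' lexer
# >>> get_custom_lexer('unknown') is None  # no override defined
# True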
690
689
691
690
692 #==============================================================================
691 #==============================================================================
693 # TEST FUNCTIONS AND CREATORS
692 # TEST FUNCTIONS AND CREATORS
694 #==============================================================================
693 #==============================================================================
695 def create_test_index(repo_location, config):
694 def create_test_index(repo_location, config):
696 """
695 """
697 Makes default test index.
696 Makes default test index.
698 """
697 """
699 import rc_testdata
698 import rc_testdata
700
699
701 rc_testdata.extract_search_index(
700 rc_testdata.extract_search_index(
702 'vcs_search_index', os.path.dirname(config['search.location']))
701 'vcs_search_index', os.path.dirname(config['search.location']))
703
702
704
703
705 def create_test_directory(test_path):
704 def create_test_directory(test_path):
706 """
705 """
707 Create test directory if it doesn't exist.
706 Create test directory if it doesn't exist.
708 """
707 """
709 if not os.path.isdir(test_path):
708 if not os.path.isdir(test_path):
710 log.debug('Creating testdir %s', test_path)
709 log.debug('Creating testdir %s', test_path)
711 os.makedirs(test_path)
710 os.makedirs(test_path)
712
711
713
712
714 def create_test_database(test_path, config):
713 def create_test_database(test_path, config):
715 """
714 """
716 Makes a fresh database.
715 Makes a fresh database.
717 """
716 """
718 from rhodecode.lib.db_manage import DbManage
717 from rhodecode.lib.db_manage import DbManage
719
718
720 # PART ONE create db
719 # PART ONE create db
721 dbconf = config['sqlalchemy.db1.url']
720 dbconf = config['sqlalchemy.db1.url']
722 log.debug('making test db %s', dbconf)
721 log.debug('making test db %s', dbconf)
723
722
724 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
723 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
725 tests=True, cli_args={'force_ask': True})
724 tests=True, cli_args={'force_ask': True})
726 dbmanage.create_tables(override=True)
725 dbmanage.create_tables(override=True)
727 dbmanage.set_db_version()
726 dbmanage.set_db_version()
728 # for tests dynamically set new root paths based on generated content
727 # for tests dynamically set new root paths based on generated content
729 dbmanage.create_settings(dbmanage.config_prompt(test_path))
728 dbmanage.create_settings(dbmanage.config_prompt(test_path))
730 dbmanage.create_default_user()
729 dbmanage.create_default_user()
731 dbmanage.create_test_admin_and_users()
730 dbmanage.create_test_admin_and_users()
732 dbmanage.create_permissions()
731 dbmanage.create_permissions()
733 dbmanage.populate_default_permissions()
732 dbmanage.populate_default_permissions()
734 Session().commit()
733 Session().commit()
735
734
736
735
737 def create_test_repositories(test_path, config):
736 def create_test_repositories(test_path, config):
738 """
737 """
739 Creates test repositories in the temporary directory. Repositories are
738 Creates test repositories in the temporary directory. Repositories are
740 extracted from archives within the rc_testdata package.
739 extracted from archives within the rc_testdata package.
741 """
740 """
742 import rc_testdata
741 import rc_testdata
743 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
742 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
744
743
745 log.debug('making test vcs repositories')
744 log.debug('making test vcs repositories')
746
745
747 idx_path = config['search.location']
746 idx_path = config['search.location']
748 data_path = config['cache_dir']
747 data_path = config['cache_dir']
749
748
750 # clean index and data
749 # clean index and data
751 if idx_path and os.path.exists(idx_path):
750 if idx_path and os.path.exists(idx_path):
752 log.debug('remove %s', idx_path)
751 log.debug('remove %s', idx_path)
753 shutil.rmtree(idx_path)
752 shutil.rmtree(idx_path)
754
753
755 if data_path and os.path.exists(data_path):
754 if data_path and os.path.exists(data_path):
756 log.debug('remove %s', data_path)
755 log.debug('remove %s', data_path)
757 shutil.rmtree(data_path)
756 shutil.rmtree(data_path)
758
757
759 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
758 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
760 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
759 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
761
760
762 # Note: Subversion is in the process of being integrated with the system,
761 # Note: Subversion is in the process of being integrated with the system,
763 # until we have a properly packed version of the test svn repository, this
762 # until we have a properly packed version of the test svn repository, this
764 # tries to copy over the repo from a package "rc_testdata"
763 # tries to copy over the repo from a package "rc_testdata"
765 svn_repo_path = rc_testdata.get_svn_repo_archive()
764 svn_repo_path = rc_testdata.get_svn_repo_archive()
766 with tarfile.open(svn_repo_path) as tar:
765 with tarfile.open(svn_repo_path) as tar:
767 tar.extractall(jn(test_path, SVN_REPO))
766 tar.extractall(jn(test_path, SVN_REPO))
768
767
769
768
770 #==============================================================================
771 # PASTER COMMANDS
772 #==============================================================================
773 class BasePasterCommand(Command):
774 """
775 Abstract Base Class for paster commands.
776
777 The celery commands are somewhat aggressive about loading
778 celery.conf, and since our module sets the `CELERY_LOADER`
779 environment variable to our loader, we have to bootstrap a bit and
780 make sure we've had a chance to load the pylons config off of the
781 command line, otherwise everything fails.
782 """
783 min_args = 1
784 min_args_error = "Please provide a paster config file as an argument."
785 takes_config_file = 1
786 requires_config_file = True
787
788 def notify_msg(self, msg, log=False):
789 """Make a notification to user, additionally if logger is passed
790 it logs this action using given logger
791
792 :param msg: message that will be printed to user
793 :param log: logging instance, to use to additionally log this message
794
795 """
796 if log and isinstance(log, logging.Logger):
797 log.info(msg)
798
799 def run(self, args):
800 """
801 Overrides Command.run
802
803 Checks for a config file argument and loads it.
804 """
805 if len(args) < self.min_args:
806 raise BadCommand(
807 self.min_args_error % {'min_args': self.min_args,
808 'actual_args': len(args)})
809
810 # Decrement because we're going to lob off the first argument.
811 # @@ This is hacky
812 self.min_args -= 1
813 self.bootstrap_config(args[0])
814 self.update_parser()
815 return super(BasePasterCommand, self).run(args[1:])
816
817 def update_parser(self):
818 """
819 Abstract method. Allows for the class' parser to be updated
820 before the superclass' `run` method is called. Necessary to
821 allow options/arguments to be passed through to the underlying
822 celery command.
823 """
824 raise NotImplementedError("Abstract Method.")
825
826 def bootstrap_config(self, conf):
827 """
828 Loads the pylons configuration.
829 """
830 from pylons import config as pylonsconfig
831
832 self.path_to_ini_file = os.path.realpath(conf)
833 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
834 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
835
836 def _init_session(self):
837 """
838 Inits SqlAlchemy Session
839 """
840 logging.config.fileConfig(self.path_to_ini_file)
841 from pylons import config
842 from rhodecode.config.utils import initialize_database
843
844 # get to remove repos !!
845 add_cache(config)
846 initialize_database(config)
847
848
849 def password_changed(auth_user, session):
769 def password_changed(auth_user, session):
850 # Never report password change in case of default user or anonymous user.
770 # Never report password change in case of default user or anonymous user.
851 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
771 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
852 return False
772 return False
853
773
854 password_hash = md5(auth_user.password) if auth_user.password else None
774 password_hash = md5(auth_user.password) if auth_user.password else None
855 rhodecode_user = session.get('rhodecode_user', {})
775 rhodecode_user = session.get('rhodecode_user', {})
856 session_password_hash = rhodecode_user.get('password', '')
776 session_password_hash = rhodecode_user.get('password', '')
857 return password_hash != session_password_hash
777 return password_hash != session_password_hash
858
778
859
779
860 def read_opensource_licenses():
780 def read_opensource_licenses():
861 global _license_cache
781 global _license_cache
862
782
863 if not _license_cache:
783 if not _license_cache:
864 licenses = pkg_resources.resource_string(
784 licenses = pkg_resources.resource_string(
865 'rhodecode', 'config/licenses.json')
785 'rhodecode', 'config/licenses.json')
866 _license_cache = json.loads(licenses)
786 _license_cache = json.loads(licenses)
867
787
868 return _license_cache
788 return _license_cache
869
789
870
790
871 def generate_platform_uuid():
791 def generate_platform_uuid():
872 """
792 """
873 Generates platform UUID based on its name
793 Generates platform UUID based on its name
874 """
794 """
875 import platform
795 import platform
876
796
877 try:
797 try:
878 uuid_list = [platform.platform()]
798 uuid_list = [platform.platform()]
879 return hashlib.sha256(':'.join(uuid_list)).hexdigest()
799 return hashlib.sha256(':'.join(uuid_list)).hexdigest()
880 except Exception as e:
800 except Exception as e:
881 log.error('Failed to generate host uuid: %s' % e)
801 log.error('Failed to generate host uuid: %s' % e)
882 return 'UNDEFINED'
802 return 'UNDEFINED'
@@ -1,1007 +1,980 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Some simple helper functions
23 Some simple helper functions
24 """
24 """
25
25
26
27 import collections
26 import collections
28 import datetime
27 import datetime
29 import dateutil.relativedelta
28 import dateutil.relativedelta
30 import hashlib
29 import hashlib
31 import logging
30 import logging
32 import re
31 import re
33 import sys
32 import sys
34 import time
33 import time
35 import urllib
34 import urllib
36 import urlobject
35 import urlobject
37 import uuid
36 import uuid
38
37
39 import pygments.lexers
38 import pygments.lexers
40 import sqlalchemy
39 import sqlalchemy
41 import sqlalchemy.engine.url
40 import sqlalchemy.engine.url
42 import sqlalchemy.exc
41 import sqlalchemy.exc
43 import sqlalchemy.sql
42 import sqlalchemy.sql
44 import webob
43 import webob
45 import routes.util
46 import pyramid.threadlocal
44 import pyramid.threadlocal
47
45
48 import rhodecode
46 import rhodecode
49 from rhodecode.translation import _, _pluralize
47 from rhodecode.translation import _, _pluralize
50
48
51
49
52 def md5(s):
50 def md5(s):
53 return hashlib.md5(s).hexdigest()
51 return hashlib.md5(s).hexdigest()
54
52
55
53
56 def md5_safe(s):
54 def md5_safe(s):
57 return md5(safe_str(s))
55 return md5(safe_str(s))
58
56
59
57
60 def __get_lem(extra_mapping=None):
58 def __get_lem(extra_mapping=None):
61 """
59 """
62 Get language extension map based on what's inside pygments lexers
60 Get language extension map based on what's inside pygments lexers
63 """
61 """
64 d = collections.defaultdict(lambda: [])
62 d = collections.defaultdict(lambda: [])
65
63
66 def __clean(s):
64 def __clean(s):
67 s = s.lstrip('*')
65 s = s.lstrip('*')
68 s = s.lstrip('.')
66 s = s.lstrip('.')
69
67
70 if s.find('[') != -1:
68 if s.find('[') != -1:
71 exts = []
69 exts = []
72 start, stop = s.find('['), s.find(']')
70 start, stop = s.find('['), s.find(']')
73
71
74 for suffix in s[start + 1:stop]:
72 for suffix in s[start + 1:stop]:
75 exts.append(s[:s.find('[')] + suffix)
73 exts.append(s[:s.find('[')] + suffix)
76 return [e.lower() for e in exts]
74 return [e.lower() for e in exts]
77 else:
75 else:
78 return [s.lower()]
76 return [s.lower()]
79
77
80 for lx, t in sorted(pygments.lexers.LEXERS.items()):
78 for lx, t in sorted(pygments.lexers.LEXERS.items()):
81 m = map(__clean, t[-2])
79 m = map(__clean, t[-2])
82 if m:
80 if m:
83 m = reduce(lambda x, y: x + y, m)
81 m = reduce(lambda x, y: x + y, m)
84 for ext in m:
82 for ext in m:
85 desc = lx.replace('Lexer', '')
83 desc = lx.replace('Lexer', '')
86 d[ext].append(desc)
84 d[ext].append(desc)
87
85
88 data = dict(d)
86 data = dict(d)
89
87
90 extra_mapping = extra_mapping or {}
88 extra_mapping = extra_mapping or {}
91 if extra_mapping:
89 if extra_mapping:
92 for k, v in extra_mapping.items():
90 for k, v in extra_mapping.items():
93 if k not in data:
91 if k not in data:
94 # register new mapping2lexer
92 # register new mapping2lexer
95 data[k] = [v]
93 data[k] = [v]
96
94
97 return data
95 return data
98
96
99
97
100 def str2bool(_str):
98 def str2bool(_str):
101 """
99 """
102 returns a True/False value from the given string by trying to translate the
100 returns a True/False value from the given string by trying to translate the
103 string into a boolean
101 string into a boolean
104
102
105 :param _str: string value to translate into boolean
103 :param _str: string value to translate into boolean
106 :rtype: boolean
104 :rtype: boolean
107 :returns: boolean from given string
105 :returns: boolean from given string
108 """
106 """
109 if _str is None:
107 if _str is None:
110 return False
108 return False
111 if _str in (True, False):
109 if _str in (True, False):
112 return _str
110 return _str
113 _str = str(_str).strip().lower()
111 _str = str(_str).strip().lower()
114 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
112 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
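# Illustrative usage (not part of the original changeset):
# >>> str2bool('Yes'), str2bool(' on '), str2bool('0'), str2bool(None)
# (True, True, False, False)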
115
113
116
114
117 def aslist(obj, sep=None, strip=True):
115 def aslist(obj, sep=None, strip=True):
118 """
116 """
119 Returns given string separated by sep as list
117 Returns given string separated by sep as list
120
118
121 :param obj:
119 :param obj:
122 :param sep:
120 :param sep:
123 :param strip:
121 :param strip:
124 """
122 """
125 if isinstance(obj, (basestring,)):
123 if isinstance(obj, (basestring,)):
126 lst = obj.split(sep)
124 lst = obj.split(sep)
127 if strip:
125 if strip:
128 lst = [v.strip() for v in lst]
126 lst = [v.strip() for v in lst]
129 return lst
127 return lst
130 elif isinstance(obj, (list, tuple)):
128 elif isinstance(obj, (list, tuple)):
131 return obj
129 return obj
132 elif obj is None:
130 elif obj is None:
133 return []
131 return []
134 else:
132 else:
135 return [obj]
133 return [obj]
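# Illustrative usage (not part of the original changeset):
# >>> aslist('py, js ,rb', sep=',')
# ['py', 'js', 'rb']
# >>> aslist(None), aslist(('a', 'b')), aslist(42)
# ([], ('a', 'b'), [42])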
136
134
137
135
138 def convert_line_endings(line, mode):
136 def convert_line_endings(line, mode):
139 """
137 """
140 Converts the line ending ("line end") of a given line according to the given mode
138 Converts the line ending ("line end") of a given line according to the given mode
141
139
142 Available modes are::
140 Available modes are::
143 0 - Unix
141 0 - Unix
144 1 - Mac
142 1 - Mac
145 2 - DOS
143 2 - DOS
146
144
147 :param line: given line to convert
145 :param line: given line to convert
148 :param mode: mode to convert to
146 :param mode: mode to convert to
149 :rtype: str
147 :rtype: str
150 :return: converted line according to mode
148 :return: converted line according to mode
151 """
149 """
152 if mode == 0:
150 if mode == 0:
153 line = line.replace('\r\n', '\n')
151 line = line.replace('\r\n', '\n')
154 line = line.replace('\r', '\n')
152 line = line.replace('\r', '\n')
155 elif mode == 1:
153 elif mode == 1:
156 line = line.replace('\r\n', '\r')
154 line = line.replace('\r\n', '\r')
157 line = line.replace('\n', '\r')
155 line = line.replace('\n', '\r')
158 elif mode == 2:
156 elif mode == 2:
159 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
157 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
160 return line
158 return line
161
159
162
160
163 def detect_mode(line, default):
161 def detect_mode(line, default):
164 """
162 """
165 Detects the line break for the given line; if the line break cannot be found,
163 Detects the line break for the given line; if the line break cannot be found,
166 the given default value is returned
164 the given default value is returned
167
165
168 :param line: str line
166 :param line: str line
169 :param default: default
167 :param default: default
170 :rtype: int
168 :rtype: int
171 :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
169 :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
172 """
170 """
173 if line.endswith('\r\n'):
171 if line.endswith('\r\n'):
174 return 2
172 return 2
175 elif line.endswith('\n'):
173 elif line.endswith('\n'):
176 return 0
174 return 0
177 elif line.endswith('\r'):
175 elif line.endswith('\r'):
178 return 1
176 return 1
179 else:
177 else:
180 return default
178 return default
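# Illustrative usage (not part of the original changeset): detect the ending of
# the first line, then normalise content to the same mode.
# >>> detect_mode('line one\r\n', default=0)
# 2
# >>> convert_line_endings('mixed\r\nendings\rhere\n', mode=0)
# 'mixed\nendings\nhere\n'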
181
179
182
180
183 def safe_int(val, default=None):
181 def safe_int(val, default=None):
184 """
182 """
185 Returns int() of val; if val is not convertible to int, the given default
183 Returns int() of val; if val is not convertible to int, the given default
186 is returned instead
184 is returned instead
187
185
188 :param val:
186 :param val:
189 :param default:
187 :param default:
190 """
188 """
191
189
192 try:
190 try:
193 val = int(val)
191 val = int(val)
194 except (ValueError, TypeError):
192 except (ValueError, TypeError):
195 val = default
193 val = default
196
194
197 return val
195 return val
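# Illustrative usage (not part of the original changeset):
# >>> safe_int('42'), safe_int('4.2'), safe_int(None, default=0)
# (42, None, 0)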
198
196
199
197
200 def safe_unicode(str_, from_encoding=None):
198 def safe_unicode(str_, from_encoding=None):
201 """
199 """
202 safe unicode function. Does a few tricks to turn str_ into unicode
200 safe unicode function. Does a few tricks to turn str_ into unicode
203
201
204 In case of a UnicodeDecodeError, we try to return it with the encoding detected
202 In case of a UnicodeDecodeError, we try to return it with the encoding detected
205 by the chardet library; if that fails, fall back to unicode with errors replaced
203 by the chardet library; if that fails, fall back to unicode with errors replaced
206
204
207 :param str_: string to decode
205 :param str_: string to decode
208 :rtype: unicode
206 :rtype: unicode
209 :returns: unicode object
207 :returns: unicode object
210 """
208 """
211 if isinstance(str_, unicode):
209 if isinstance(str_, unicode):
212 return str_
210 return str_
213
211
214 if not from_encoding:
212 if not from_encoding:
215 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
213 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
216 'utf8'), sep=',')
214 'utf8'), sep=',')
217 from_encoding = DEFAULT_ENCODINGS
215 from_encoding = DEFAULT_ENCODINGS
218
216
219 if not isinstance(from_encoding, (list, tuple)):
217 if not isinstance(from_encoding, (list, tuple)):
220 from_encoding = [from_encoding]
218 from_encoding = [from_encoding]
221
219
222 try:
220 try:
223 return unicode(str_)
221 return unicode(str_)
224 except UnicodeDecodeError:
222 except UnicodeDecodeError:
225 pass
223 pass
226
224
227 for enc in from_encoding:
225 for enc in from_encoding:
228 try:
226 try:
229 return unicode(str_, enc)
227 return unicode(str_, enc)
230 except UnicodeDecodeError:
228 except UnicodeDecodeError:
231 pass
229 pass
232
230
233 try:
231 try:
234 import chardet
232 import chardet
235 encoding = chardet.detect(str_)['encoding']
233 encoding = chardet.detect(str_)['encoding']
236 if encoding is None:
234 if encoding is None:
237 raise Exception()
235 raise Exception()
238 return str_.decode(encoding)
236 return str_.decode(encoding)
239 except (ImportError, UnicodeDecodeError, Exception):
237 except (ImportError, UnicodeDecodeError, Exception):
240 return unicode(str_, from_encoding[0], 'replace')
238 return unicode(str_, from_encoding[0], 'replace')
241
239
242
240
243 def safe_str(unicode_, to_encoding=None):
241 def safe_str(unicode_, to_encoding=None):
244 """
242 """
245 safe str function. Does a few tricks to turn unicode_ into a string
243 safe str function. Does a few tricks to turn unicode_ into a string
246
244
247 In case of a UnicodeEncodeError, we try to return it with the encoding detected
245 In case of a UnicodeEncodeError, we try to return it with the encoding detected
248 by the chardet library; if that fails, fall back to a string with errors replaced
246 by the chardet library; if that fails, fall back to a string with errors replaced
249
247
250 :param unicode_: unicode to encode
248 :param unicode_: unicode to encode
251 :rtype: str
249 :rtype: str
252 :returns: str object
250 :returns: str object
253 """
251 """
254
252
255 # if it's not basestr cast to str
253 # if it's not basestr cast to str
256 if not isinstance(unicode_, basestring):
254 if not isinstance(unicode_, basestring):
257 return str(unicode_)
255 return str(unicode_)
258
256
259 if isinstance(unicode_, str):
257 if isinstance(unicode_, str):
260 return unicode_
258 return unicode_
261
259
262 if not to_encoding:
260 if not to_encoding:
263 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
261 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
264 'utf8'), sep=',')
262 'utf8'), sep=',')
265 to_encoding = DEFAULT_ENCODINGS
263 to_encoding = DEFAULT_ENCODINGS
266
264
267 if not isinstance(to_encoding, (list, tuple)):
265 if not isinstance(to_encoding, (list, tuple)):
268 to_encoding = [to_encoding]
266 to_encoding = [to_encoding]
269
267
270 for enc in to_encoding:
268 for enc in to_encoding:
271 try:
269 try:
272 return unicode_.encode(enc)
270 return unicode_.encode(enc)
273 except UnicodeEncodeError:
271 except UnicodeEncodeError:
274 pass
272 pass
275
273
276 try:
274 try:
277 import chardet
275 import chardet
278 encoding = chardet.detect(unicode_)['encoding']
276 encoding = chardet.detect(unicode_)['encoding']
279 if encoding is None:
277 if encoding is None:
280 raise UnicodeEncodeError('utf-8', u'', 0, 1, 'encoding could not be detected')
278 raise UnicodeEncodeError('utf-8', u'', 0, 1, 'encoding could not be detected')
281
279
282 return unicode_.encode(encoding)
280 return unicode_.encode(encoding)
283 except (ImportError, UnicodeEncodeError):
281 except (ImportError, UnicodeEncodeError):
284 return unicode_.encode(to_encoding[0], 'replace')
282 return unicode_.encode(to_encoding[0], 'replace')
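# Illustrative sketch (not part of the original changeset), assuming the default
# 'utf8' encoding from rhodecode.CONFIG: safe_unicode('\xc5\x82') decodes the
# utf-8 bytes to u'\u0142', safe_str(u'\u0142') encodes back to '\xc5\x82', and
# non-string input such as safe_str(42) is simply cast to '42'. Both helpers fall
# back to chardet detection and finally to 'replace' error handling.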
285
283
286
284
287 def remove_suffix(s, suffix):
285 def remove_suffix(s, suffix):
288 if s.endswith(suffix):
286 if s.endswith(suffix):
289 s = s[:-1 * len(suffix)]
287 s = s[:-1 * len(suffix)]
290 return s
288 return s
291
289
292
290
293 def remove_prefix(s, prefix):
291 def remove_prefix(s, prefix):
294 if s.startswith(prefix):
292 if s.startswith(prefix):
295 s = s[len(prefix):]
293 s = s[len(prefix):]
296 return s
294 return s
297
295
298
296
299 def find_calling_context(ignore_modules=None):
297 def find_calling_context(ignore_modules=None):
300 """
298 """
301 Look through the calling stack and return the frame which called
299 Look through the calling stack and return the frame which called
302 this function and is part of the core module (i.e. rhodecode.*)
300 this function and is part of the core module (i.e. rhodecode.*)
303
301
304 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
302 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
305 """
303 """
306
304
307 ignore_modules = ignore_modules or []
305 ignore_modules = ignore_modules or []
308
306
309 f = sys._getframe(2)
307 f = sys._getframe(2)
310 while f.f_back is not None:
308 while f.f_back is not None:
311 name = f.f_globals.get('__name__')
309 name = f.f_globals.get('__name__')
312 if name and name.startswith(__name__.split('.')[0]):
310 if name and name.startswith(__name__.split('.')[0]):
313 if name not in ignore_modules:
311 if name not in ignore_modules:
314 return f
312 return f
315 f = f.f_back
313 f = f.f_back
316 return None
314 return None
317
315
318
316
319 def ping_connection(connection, branch):
317 def ping_connection(connection, branch):
320 if branch:
318 if branch:
321 # "branch" refers to a sub-connection of a connection,
319 # "branch" refers to a sub-connection of a connection,
322 # we don't want to bother pinging on these.
320 # we don't want to bother pinging on these.
323 return
321 return
324
322
325 # turn off "close with result". This flag is only used with
323 # turn off "close with result". This flag is only used with
326 # "connectionless" execution, otherwise will be False in any case
324 # "connectionless" execution, otherwise will be False in any case
327 save_should_close_with_result = connection.should_close_with_result
325 save_should_close_with_result = connection.should_close_with_result
328 connection.should_close_with_result = False
326 connection.should_close_with_result = False
329
327
330 try:
328 try:
331 # run a SELECT 1. use a core select() so that
329 # run a SELECT 1. use a core select() so that
332 # the SELECT of a scalar value without a table is
330 # the SELECT of a scalar value without a table is
333 # appropriately formatted for the backend
331 # appropriately formatted for the backend
334 connection.scalar(sqlalchemy.sql.select([1]))
332 connection.scalar(sqlalchemy.sql.select([1]))
335 except sqlalchemy.exc.DBAPIError as err:
333 except sqlalchemy.exc.DBAPIError as err:
336 # catch SQLAlchemy's DBAPIError, which is a wrapper
334 # catch SQLAlchemy's DBAPIError, which is a wrapper
337 # for the DBAPI's exception. It includes a .connection_invalidated
335 # for the DBAPI's exception. It includes a .connection_invalidated
338 # attribute which specifies if this connection is a "disconnect"
336 # attribute which specifies if this connection is a "disconnect"
339 # condition, which is based on inspection of the original exception
337 # condition, which is based on inspection of the original exception
340 # by the dialect in use.
338 # by the dialect in use.
341 if err.connection_invalidated:
339 if err.connection_invalidated:
342 # run the same SELECT again - the connection will re-validate
340 # run the same SELECT again - the connection will re-validate
343 # itself and establish a new connection. The disconnect detection
341 # itself and establish a new connection. The disconnect detection
344 # here also causes the whole connection pool to be invalidated
342 # here also causes the whole connection pool to be invalidated
345 # so that all stale connections are discarded.
343 # so that all stale connections are discarded.
346 connection.scalar(sqlalchemy.sql.select([1]))
344 connection.scalar(sqlalchemy.sql.select([1]))
347 else:
345 else:
348 raise
346 raise
349 finally:
347 finally:
350 # restore "close with result"
348 # restore "close with result"
351 connection.should_close_with_result = save_should_close_with_result
349 connection.should_close_with_result = save_should_close_with_result
352
350
353
351
354 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
352 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
355 """Custom engine_from_config functions."""
353 """Custom engine_from_config functions."""
356 log = logging.getLogger('sqlalchemy.engine')
354 log = logging.getLogger('sqlalchemy.engine')
357 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
355 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
358
356
359 def color_sql(sql):
357 def color_sql(sql):
360 color_seq = '\033[1;33m' # This is yellow: code 33
358 color_seq = '\033[1;33m' # This is yellow: code 33
361 normal = '\x1b[0m'
359 normal = '\x1b[0m'
362 return ''.join([color_seq, sql, normal])
360 return ''.join([color_seq, sql, normal])
363
361
364 if configuration['debug']:
362 if configuration['debug']:
365 # attach events only for debug configuration
363 # attach events only for debug configuration
366
364
367 def before_cursor_execute(conn, cursor, statement,
365 def before_cursor_execute(conn, cursor, statement,
368 parameters, context, executemany):
366 parameters, context, executemany):
369 setattr(conn, 'query_start_time', time.time())
367 setattr(conn, 'query_start_time', time.time())
370 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
368 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
371 calling_context = find_calling_context(ignore_modules=[
369 calling_context = find_calling_context(ignore_modules=[
372 'rhodecode.lib.caching_query',
370 'rhodecode.lib.caching_query',
373 'rhodecode.model.settings',
371 'rhodecode.model.settings',
374 ])
372 ])
375 if calling_context:
373 if calling_context:
376 log.info(color_sql('call context %s:%s' % (
374 log.info(color_sql('call context %s:%s' % (
377 calling_context.f_code.co_filename,
375 calling_context.f_code.co_filename,
378 calling_context.f_lineno,
376 calling_context.f_lineno,
379 )))
377 )))
380
378
381 def after_cursor_execute(conn, cursor, statement,
379 def after_cursor_execute(conn, cursor, statement,
382 parameters, context, executemany):
380 parameters, context, executemany):
383 delattr(conn, 'query_start_time')
381 delattr(conn, 'query_start_time')
384
382
385 sqlalchemy.event.listen(engine, "engine_connect",
383 sqlalchemy.event.listen(engine, "engine_connect",
386 ping_connection)
384 ping_connection)
387 sqlalchemy.event.listen(engine, "before_cursor_execute",
385 sqlalchemy.event.listen(engine, "before_cursor_execute",
388 before_cursor_execute)
386 before_cursor_execute)
389 sqlalchemy.event.listen(engine, "after_cursor_execute",
387 sqlalchemy.event.listen(engine, "after_cursor_execute",
390 after_cursor_execute)
388 after_cursor_execute)
391
389
392 return engine
390 return engine
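# Illustrative sketch (not part of the original changeset): an in-memory sqlite
# engine; the 'debug' key must be present in the configuration, and a truthy
# value attaches the SQL timing/logging event hooks defined above.
# >>> engine = engine_from_config({'sqlalchemy.url': 'sqlite://', 'debug': False})
# >>> engine.dialect.name
# 'sqlite'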
393
391
394
392
395 def get_encryption_key(config):
393 def get_encryption_key(config):
396 secret = config.get('rhodecode.encrypted_values.secret')
394 secret = config.get('rhodecode.encrypted_values.secret')
397 default = config['beaker.session.secret']
395 default = config['beaker.session.secret']
398 return secret or default
396 return secret or default
399
397
400
398
401 def age(prevdate, now=None, show_short_version=False, show_suffix=True,
399 def age(prevdate, now=None, show_short_version=False, show_suffix=True,
402 short_format=False):
400 short_format=False):
403 """
401 """
404 Turns a datetime into an age string.
402 Turns a datetime into an age string.
405 If show_short_version is True, this generates a shorter string with
403 If show_short_version is True, this generates a shorter string with
406 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
404 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
407
405
408 *IMPORTANT*
406 *IMPORTANT*
409 The code of this function is written in a special way so it's easier to
407 The code of this function is written in a special way so it's easier to
410 backport it to javascript. If you mean to update it, please also update
408 backport it to javascript. If you mean to update it, please also update
411 `jquery.timeago-extension.js` file
409 `jquery.timeago-extension.js` file
412
410
413 :param prevdate: datetime object
411 :param prevdate: datetime object
414 :param now: get current time, if not defined we use
412 :param now: get current time, if not defined we use
415 `datetime.datetime.now()`
413 `datetime.datetime.now()`
416 :param show_short_version: if it should approximate the date and
414 :param show_short_version: if it should approximate the date and
417 return a shorter string
415 return a shorter string
418 :param show_suffix:
416 :param show_suffix:
419 :param short_format: show short format, eg 2D instead of 2 days
417 :param short_format: show short format, eg 2D instead of 2 days
420 :rtype: unicode
418 :rtype: unicode
421 :returns: unicode words describing age
419 :returns: unicode words describing age
422 """
420 """
423
421
424 def _get_relative_delta(now, prevdate):
422 def _get_relative_delta(now, prevdate):
425 base = dateutil.relativedelta.relativedelta(now, prevdate)
423 base = dateutil.relativedelta.relativedelta(now, prevdate)
426 return {
424 return {
427 'year': base.years,
425 'year': base.years,
428 'month': base.months,
426 'month': base.months,
429 'day': base.days,
427 'day': base.days,
430 'hour': base.hours,
428 'hour': base.hours,
431 'minute': base.minutes,
429 'minute': base.minutes,
432 'second': base.seconds,
430 'second': base.seconds,
433 }
431 }
434
432
435 def _is_leap_year(year):
433 def _is_leap_year(year):
436 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
434 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
437
435
438 def get_month(prevdate):
436 def get_month(prevdate):
439 return prevdate.month
437 return prevdate.month
440
438
441 def get_year(prevdate):
439 def get_year(prevdate):
442 return prevdate.year
440 return prevdate.year
443
441
444 now = now or datetime.datetime.now()
442 now = now or datetime.datetime.now()
445 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
443 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
446 deltas = {}
444 deltas = {}
447 future = False
445 future = False
448
446
449 if prevdate > now:
447 if prevdate > now:
450 now_old = now
448 now_old = now
451 now = prevdate
449 now = prevdate
452 prevdate = now_old
450 prevdate = now_old
453 future = True
451 future = True
454 if future:
452 if future:
455 prevdate = prevdate.replace(microsecond=0)
453 prevdate = prevdate.replace(microsecond=0)
456 # Get date parts deltas
454 # Get date parts deltas
457 for part in order:
455 for part in order:
458 rel_delta = _get_relative_delta(now, prevdate)
456 rel_delta = _get_relative_delta(now, prevdate)
459 deltas[part] = rel_delta[part]
457 deltas[part] = rel_delta[part]
460
458
461 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
459 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
462 # not 1 hour, -59 minutes and -59 seconds)
460 # not 1 hour, -59 minutes and -59 seconds)
463 offsets = [[5, 60], [4, 60], [3, 24]]
461 offsets = [[5, 60], [4, 60], [3, 24]]
464 for element in offsets: # seconds, minutes, hours
462 for element in offsets: # seconds, minutes, hours
465 num = element[0]
463 num = element[0]
466 length = element[1]
464 length = element[1]
467
465
468 part = order[num]
466 part = order[num]
469 carry_part = order[num - 1]
467 carry_part = order[num - 1]
470
468
471 if deltas[part] < 0:
469 if deltas[part] < 0:
472 deltas[part] += length
470 deltas[part] += length
473 deltas[carry_part] -= 1
471 deltas[carry_part] -= 1
474
472
475 # Same thing for days except that the increment depends on the (variable)
473 # Same thing for days except that the increment depends on the (variable)
476 # number of days in the month
474 # number of days in the month
477 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
475 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
478 if deltas['day'] < 0:
476 if deltas['day'] < 0:
479 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
477 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
480 deltas['day'] += 29
478 deltas['day'] += 29
481 else:
479 else:
482 deltas['day'] += month_lengths[get_month(prevdate) - 1]
480 deltas['day'] += month_lengths[get_month(prevdate) - 1]
483
481
484 deltas['month'] -= 1
482 deltas['month'] -= 1
485
483
486 if deltas['month'] < 0:
484 if deltas['month'] < 0:
487 deltas['month'] += 12
485 deltas['month'] += 12
488 deltas['year'] -= 1
486 deltas['year'] -= 1
489
487
490 # Format the result
488 # Format the result
491 if short_format:
489 if short_format:
492 fmt_funcs = {
490 fmt_funcs = {
493 'year': lambda d: u'%dy' % d,
491 'year': lambda d: u'%dy' % d,
494 'month': lambda d: u'%dm' % d,
492 'month': lambda d: u'%dm' % d,
495 'day': lambda d: u'%dd' % d,
493 'day': lambda d: u'%dd' % d,
496 'hour': lambda d: u'%dh' % d,
494 'hour': lambda d: u'%dh' % d,
497 'minute': lambda d: u'%dmin' % d,
495 'minute': lambda d: u'%dmin' % d,
498 'second': lambda d: u'%dsec' % d,
496 'second': lambda d: u'%dsec' % d,
499 }
497 }
500 else:
498 else:
501 fmt_funcs = {
499 fmt_funcs = {
502 'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
500 'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
503 'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
501 'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
504 'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
502 'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
505 'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
503 'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
506 'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
504 'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
507 'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
505 'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
508 }
506 }
509
507
510 i = 0
508 i = 0
511 for part in order:
509 for part in order:
512 value = deltas[part]
510 value = deltas[part]
513 if value != 0:
511 if value != 0:
514
512
515 if i < 5:
513 if i < 5:
516 sub_part = order[i + 1]
514 sub_part = order[i + 1]
517 sub_value = deltas[sub_part]
515 sub_value = deltas[sub_part]
518 else:
516 else:
519 sub_value = 0
517 sub_value = 0
520
518
521 if sub_value == 0 or show_short_version:
519 if sub_value == 0 or show_short_version:
522 _val = fmt_funcs[part](value)
520 _val = fmt_funcs[part](value)
523 if future:
521 if future:
524 if show_suffix:
522 if show_suffix:
525 return _(u'in ${ago}', mapping={'ago': _val})
523 return _(u'in ${ago}', mapping={'ago': _val})
526 else:
524 else:
527 return _(_val)
525 return _(_val)
528
526
529 else:
527 else:
530 if show_suffix:
528 if show_suffix:
531 return _(u'${ago} ago', mapping={'ago': _val})
529 return _(u'${ago} ago', mapping={'ago': _val})
532 else:
530 else:
533 return _(_val)
531 return _(_val)
534
532
535 val = fmt_funcs[part](value)
533 val = fmt_funcs[part](value)
536 val_detail = fmt_funcs[sub_part](sub_value)
534 val_detail = fmt_funcs[sub_part](sub_value)
537 mapping = {'val': val, 'detail': val_detail}
535 mapping = {'val': val, 'detail': val_detail}
538
536
539 if short_format:
537 if short_format:
540 datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
538 datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
541 if show_suffix:
539 if show_suffix:
542 datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
540 datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
543 if future:
541 if future:
544 datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
542 datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
545 else:
543 else:
546 datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
544 datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
547 if show_suffix:
545 if show_suffix:
548 datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
546 datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
549 if future:
547 if future:
550 datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)
548 datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)
551
549
552 return datetime_tmpl
550 return datetime_tmpl
553 i += 1
551 i += 1
554 return _(u'just now')
552 return _(u'just now')
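# Illustrative sketch (not part of the original changeset): the return value is a
# lazy translation string; once interpolated/rendered it reads roughly like:
# >>> age(datetime.datetime(2017, 1, 1), now=datetime.datetime(2017, 1, 2, 23, 0))
# u'1 day and 23 hours ago'
# >>> age(datetime.datetime(2017, 1, 1), now=datetime.datetime(2017, 1, 2, 23, 0),
# ...     show_short_version=True)
# u'1 day ago'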
555
553
556
554
557 def cleaned_uri(uri):
555 def cleaned_uri(uri):
558 """
556 """
559 Quotes '[' and ']' from uri if there is only one of them.
557 Quotes '[' and ']' from uri if there is only one of them.
560 According to RFC 3986 we cannot use such chars in a uri
558 According to RFC 3986 we cannot use such chars in a uri
561 :param uri:
559 :param uri:
562 :return: uri without these chars
560 :return: uri without these chars
563 """
561 """
564 return urllib.quote(uri, safe='@$:/')
562 return urllib.quote(uri, safe='@$:/')
565
563
566
564
567 def uri_filter(uri):
565 def uri_filter(uri):
568 """
566 """
569 Removes user:password from given url string
567 Removes user:password from given url string
570
568
571 :param uri:
569 :param uri:
572 :rtype: list
570 :rtype: list
573 :returns: filtered list of uri parts (proto, host, port)
571 :returns: filtered list of uri parts (proto, host, port)
574 """
572 """
575 if not uri:
573 if not uri:
576 return ''
574 return ''
577
575
578 proto = ''
576 proto = ''
579
577
580 for pat in ('https://', 'http://'):
578 for pat in ('https://', 'http://'):
581 if uri.startswith(pat):
579 if uri.startswith(pat):
582 uri = uri[len(pat):]
580 uri = uri[len(pat):]
583 proto = pat
581 proto = pat
584 break
582 break
585
583
586 # remove passwords and username
584 # remove passwords and username
587 uri = uri[uri.find('@') + 1:]
585 uri = uri[uri.find('@') + 1:]
588
586
589 # get the port
587 # get the port
590 cred_pos = uri.find(':')
588 cred_pos = uri.find(':')
591 if cred_pos == -1:
589 if cred_pos == -1:
592 host, port = uri, None
590 host, port = uri, None
593 else:
591 else:
594 host, port = uri[:cred_pos], uri[cred_pos + 1:]
592 host, port = uri[:cred_pos], uri[cred_pos + 1:]
595
593
596 return filter(None, [proto, host, port])
594 return filter(None, [proto, host, port])
597
595
598
596
599 def credentials_filter(uri):
597 def credentials_filter(uri):
600 """
598 """
601 Returns a url with removed credentials
599 Returns a url with removed credentials
602
600
603 :param uri:
601 :param uri:
604 """
602 """
605
603
606 uri = uri_filter(uri)
604 uri = uri_filter(uri)
607 # check if we have port
605 # check if we have port
608 if len(uri) > 2 and uri[2]:
606 if len(uri) > 2 and uri[2]:
609 uri[2] = ':' + uri[2]
607 uri[2] = ':' + uri[2]
610
608
611 return ''.join(uri)
609 return ''.join(uri)
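# Illustrative usage (not part of the original changeset): credentials are
# stripped while scheme, host and port are preserved.
# >>> uri_filter('http://user:secret@example.com:8080/repo')
# ['http://', 'example.com', '8080/repo']
# >>> credentials_filter('http://user:secret@example.com:8080/repo')
# 'http://example.com:8080/repo'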
612
610
613
611
614 def get_clone_url(request, uri_tmpl, repo_name, repo_id, **override):
612 def get_clone_url(request, uri_tmpl, repo_name, repo_id, **override):
615 qualifed_home_url = request.route_url('home')
613 qualifed_home_url = request.route_url('home')
616 parsed_url = urlobject.URLObject(qualifed_home_url)
614 parsed_url = urlobject.URLObject(qualifed_home_url)
617 decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
615 decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
618 args = {
616 args = {
619 'scheme': parsed_url.scheme,
617 'scheme': parsed_url.scheme,
620 'user': '',
618 'user': '',
621 # path if we use proxy-prefix
619 # path if we use proxy-prefix
622 'netloc': parsed_url.netloc+decoded_path,
620 'netloc': parsed_url.netloc+decoded_path,
623 'prefix': decoded_path,
621 'prefix': decoded_path,
624 'repo': repo_name,
622 'repo': repo_name,
625 'repoid': str(repo_id)
623 'repoid': str(repo_id)
626 }
624 }
627 args.update(override)
625 args.update(override)
628 args['user'] = urllib.quote(safe_str(args['user']))
626 args['user'] = urllib.quote(safe_str(args['user']))
629
627
630 for k, v in args.items():
628 for k, v in args.items():
631 uri_tmpl = uri_tmpl.replace('{%s}' % k, v)
629 uri_tmpl = uri_tmpl.replace('{%s}' % k, v)
632
630
633 # remove leading @ sign if it's present. Case of empty user
631 # remove leading @ sign if it's present. Case of empty user
634 url_obj = urlobject.URLObject(uri_tmpl)
632 url_obj = urlobject.URLObject(uri_tmpl)
635 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
633 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
636
634
637 return safe_unicode(url)
635 return safe_unicode(url)
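# Illustrative sketch (not part of the original changeset): with the home route
# resolving to https://code.example.com/ and a template such as
# '{scheme}://{user}@{netloc}/{repo}', get_clone_url(request, tmpl, 'grp/repo', 3)
# would expand to u'https://code.example.com/grp/repo' (the empty user and its
# leading '@' are dropped); the hostname and template shown are assumptions.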
638
636
639
637
640 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None):
638 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None):
641 """
639 """
642 Safe version of get_commit; if this commit doesn't exist for a
640 Safe version of get_commit; if this commit doesn't exist for a
643 repository it returns a dummy (EmptyCommit) one instead
641 repository it returns a dummy (EmptyCommit) one instead
644
642
645 :param repo: repository instance
643 :param repo: repository instance
646 :param commit_id: commit id as str
644 :param commit_id: commit id as str
647 :param pre_load: optional list of commit attributes to load
645 :param pre_load: optional list of commit attributes to load
648 """
646 """
649 # TODO(skreft): remove these circular imports
647 # TODO(skreft): remove these circular imports
650 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
648 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
651 from rhodecode.lib.vcs.exceptions import RepositoryError
649 from rhodecode.lib.vcs.exceptions import RepositoryError
652 if not isinstance(repo, BaseRepository):
650 if not isinstance(repo, BaseRepository):
653 raise Exception('You must pass a Repository '
651 raise Exception('You must pass a Repository '
654 'object as first argument, got %s' % type(repo))
652 'object as first argument, got %s' % type(repo))
655
653
656 try:
654 try:
657 commit = repo.get_commit(
655 commit = repo.get_commit(
658 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
656 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
659 except (RepositoryError, LookupError):
657 except (RepositoryError, LookupError):
660 commit = EmptyCommit()
658 commit = EmptyCommit()
661 return commit
659 return commit
662
660
663
661
664 def datetime_to_time(dt):
662 def datetime_to_time(dt):
665 if dt:
663 if dt:
666 return time.mktime(dt.timetuple())
664 return time.mktime(dt.timetuple())
667
665
668
666
669 def time_to_datetime(tm):
667 def time_to_datetime(tm):
670 if tm:
668 if tm:
671 if isinstance(tm, basestring):
669 if isinstance(tm, basestring):
672 try:
670 try:
673 tm = float(tm)
671 tm = float(tm)
674 except ValueError:
672 except ValueError:
675 return
673 return
676 return datetime.datetime.fromtimestamp(tm)
674 return datetime.datetime.fromtimestamp(tm)
677
675
678
676
679 def time_to_utcdatetime(tm):
677 def time_to_utcdatetime(tm):
680 if tm:
678 if tm:
681 if isinstance(tm, basestring):
679 if isinstance(tm, basestring):
682 try:
680 try:
683 tm = float(tm)
681 tm = float(tm)
684 except ValueError:
682 except ValueError:
685 return
683 return
686 return datetime.datetime.utcfromtimestamp(tm)
684 return datetime.datetime.utcfromtimestamp(tm)
687
685
688
686
689 MENTIONS_REGEX = re.compile(
687 MENTIONS_REGEX = re.compile(
690 # ^@ or @ without any special chars in front
688 # ^@ or @ without any special chars in front
691 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
689 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
692 # main body starts with letter, then can be . - _
690 # main body starts with letter, then can be . - _
693 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
691 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
694 re.VERBOSE | re.MULTILINE)
692 re.VERBOSE | re.MULTILINE)
695
693
696
694
697 def extract_mentioned_users(s):
695 def extract_mentioned_users(s):
698 """
696 """
699 Returns unique usernames from given string s that have @mention
697 Returns unique usernames from given string s that have @mention
700
698
701 :param s: string to get mentions
699 :param s: string to get mentions
702 """
700 """
703 usrs = set()
701 usrs = set()
704 for username in MENTIONS_REGEX.findall(s):
702 for username in MENTIONS_REGEX.findall(s):
705 usrs.add(username)
703 usrs.add(username)
706
704
707 return sorted(list(usrs), key=lambda k: k.lower())
705 return sorted(list(usrs), key=lambda k: k.lower())
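# Illustrative usage (not part of the original changeset):
# >>> extract_mentioned_users('@alice please review, cc @bob.smith and not x@y')
# ['alice', 'bob.smith']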
708
706
709
707
710 class StrictAttributeDict(dict):
708 class StrictAttributeDict(dict):
711 """
709 """
712 Strict version of AttributeDict which raises an AttributeError when the
710 Strict version of AttributeDict which raises an AttributeError when the
713 requested attribute is not set
711 requested attribute is not set
714 """
712 """
715 def __getattr__(self, attr):
713 def __getattr__(self, attr):
716 try:
714 try:
717 return self[attr]
715 return self[attr]
718 except KeyError:
716 except KeyError:
719 raise AttributeError('%s object has no attribute %s' % (
717 raise AttributeError('%s object has no attribute %s' % (
720 self.__class__, attr))
718 self.__class__, attr))
721 __setattr__ = dict.__setitem__
719 __setattr__ = dict.__setitem__
722 __delattr__ = dict.__delitem__
720 __delattr__ = dict.__delitem__
723
721
724
722
725 class AttributeDict(dict):
723 class AttributeDict(dict):
726 def __getattr__(self, attr):
724 def __getattr__(self, attr):
727 return self.get(attr, None)
725 return self.get(attr, None)
728 __setattr__ = dict.__setitem__
726 __setattr__ = dict.__setitem__
729 __delattr__ = dict.__delitem__
727 __delattr__ = dict.__delitem__
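# Illustrative usage (not part of the original changeset):
# >>> data = AttributeDict(hook_type='pre_push', user='admin')
# >>> data.hook_type, data.missing
# ('pre_push', None)
# >>> StrictAttributeDict(user='admin').missing   # raises AttributeError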
730
728
731
729
732 def fix_PATH(os_=None):
730 def fix_PATH(os_=None):
733 """
731 """
734 Get current active python path, and append it to PATH variable to fix
732 Get current active python path, and append it to PATH variable to fix
735 issues of subprocess calls and different python versions
733 issues of subprocess calls and different python versions
736 """
734 """
737 if os_ is None:
735 if os_ is None:
738 import os
736 import os
739 else:
737 else:
740 os = os_
738 os = os_
741
739
742 cur_path = os.path.split(sys.executable)[0]
740 cur_path = os.path.split(sys.executable)[0]
743 if not os.environ['PATH'].startswith(cur_path):
741 if not os.environ['PATH'].startswith(cur_path):
744 os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
742 os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
745
743
746
744
747 def obfuscate_url_pw(engine):
745 def obfuscate_url_pw(engine):
748 _url = engine or ''
746 _url = engine or ''
749 try:
747 try:
750 _url = sqlalchemy.engine.url.make_url(engine)
748 _url = sqlalchemy.engine.url.make_url(engine)
751 if _url.password:
749 if _url.password:
752 _url.password = 'XXXXX'
750 _url.password = 'XXXXX'
753 except Exception:
751 except Exception:
754 pass
752 pass
755 return unicode(_url)
753 return unicode(_url)
756
754
757
755
758 def get_server_url(environ):
756 def get_server_url(environ):
759 req = webob.Request(environ)
757 req = webob.Request(environ)
760 return req.host_url + req.script_name
758 return req.host_url + req.script_name
761
759
762
760
763 def unique_id(hexlen=32):
761 def unique_id(hexlen=32):
764 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
762 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
765 return suuid(truncate_to=hexlen, alphabet=alphabet)
763 return suuid(truncate_to=hexlen, alphabet=alphabet)
766
764
767
765
768 def suuid(url=None, truncate_to=22, alphabet=None):
766 def suuid(url=None, truncate_to=22, alphabet=None):
769 """
767 """
770 Generate and return a short URL safe UUID.
768 Generate and return a short URL safe UUID.
771
769
772 If the url parameter is provided, set the namespace to the provided
770 If the url parameter is provided, set the namespace to the provided
773 URL and generate a UUID.
771 URL and generate a UUID.
774
772
775 :param url: url to get the uuid for
773 :param url: url to get the uuid for
776 :param truncate_to: truncate the basic 22 char UUID to a shorter version
774 :param truncate_to: truncate the basic 22 char UUID to a shorter version
777
775
778 The IDs won't be universally unique any longer, but the probability of
776 The IDs won't be universally unique any longer, but the probability of
779 a collision will still be very low.
777 a collision will still be very low.
780 """
778 """
781 # Define our alphabet.
779 # Define our alphabet.
782 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
780 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
783
781
784 # If no URL is given, generate a random UUID.
782 # If no URL is given, generate a random UUID.
785 if url is None:
783 if url is None:
786 unique_id = uuid.uuid4().int
784 unique_id = uuid.uuid4().int
787 else:
785 else:
788 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
786 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
789
787
790 alphabet_length = len(_ALPHABET)
788 alphabet_length = len(_ALPHABET)
791 output = []
789 output = []
792 while unique_id > 0:
790 while unique_id > 0:
793 digit = unique_id % alphabet_length
791 digit = unique_id % alphabet_length
794 output.append(_ALPHABET[digit])
792 output.append(_ALPHABET[digit])
795 unique_id = int(unique_id / alphabet_length)
793 unique_id = int(unique_id / alphabet_length)
796 return "".join(output)[:truncate_to]
794 return "".join(output)[:truncate_to]
797
795
798
796
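A minimal usage sketch of the two ID helpers above (an illustration based on the code as defined here, not taken from the changeset):

    unique_id()                       # random ID from the custom alphabet, capped at 32 chars
    suuid(truncate_to=8)              # random short ID, at most 8 characters
    suuid(url='https://example.com')  # deterministic: the same URL always yields the same ID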
799 def get_current_rhodecode_user(request=None):
797 def get_current_rhodecode_user(request=None):
800 """
798 """
801 Gets rhodecode user from request
799 Gets rhodecode user from request
802 """
800 """
803 pyramid_request = request or pyramid.threadlocal.get_current_request()
801 pyramid_request = request or pyramid.threadlocal.get_current_request()
804
802
805 # web case
803 # web case
806 if pyramid_request and hasattr(pyramid_request, 'user'):
804 if pyramid_request and hasattr(pyramid_request, 'user'):
807 return pyramid_request.user
805 return pyramid_request.user
808
806
809 # api case
807 # api case
810 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
808 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
811 return pyramid_request.rpc_user
809 return pyramid_request.rpc_user
812
810
813 return None
811 return None
814
812
815
813
816 def action_logger_generic(action, namespace=''):
814 def action_logger_generic(action, namespace=''):
817 """
815 """
818 A generic logger for actions useful to the system overview. It tries to find
816 A generic logger for actions useful to the system overview. It tries to find
819 an acting user for the context of the call, otherwise it reports an unknown user.
817 an acting user for the context of the call, otherwise it reports an unknown user.
820
818
821 :param action: logging message eg 'comment 5 deleted'
819 :param action: logging message eg 'comment 5 deleted'
822 :type action: string
820 :type action: string
823
821
824 :param namespace: namespace of the logging message eg. 'repo.comments'
822 :param namespace: namespace of the logging message eg. 'repo.comments'
825 :type namespace: string
823 :type namespace: string
826
824
827 """
825 """
828
826
829 logger_name = 'rhodecode.actions'
827 logger_name = 'rhodecode.actions'
830
828
831 if namespace:
829 if namespace:
832 logger_name += '.' + namespace
830 logger_name += '.' + namespace
833
831
834 log = logging.getLogger(logger_name)
832 log = logging.getLogger(logger_name)
835
833
836 # get a user if we can
834 # get a user if we can
837 user = get_current_rhodecode_user()
835 user = get_current_rhodecode_user()
838
836
839 logfunc = log.info
837 logfunc = log.info
840
838
841 if not user:
839 if not user:
842 user = '<unknown user>'
840 user = '<unknown user>'
843 logfunc = log.warning
841 logfunc = log.warning
844
842
845 logfunc('Logging action by {}: {}'.format(user, action))
843 logfunc('Logging action by {}: {}'.format(user, action))
846
844
847
845
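For example (an assumed call mirroring the docstring above), the following logs through the 'rhodecode.actions.repo.comments' logger and falls back to '<unknown user>' outside of a request:

    action_logger_generic('comment 5 deleted', namespace='repo.comments')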
848 def escape_split(text, sep=',', maxsplit=-1):
846 def escape_split(text, sep=',', maxsplit=-1):
849 r"""
847 r"""
850 Allows for escaping of the separator: e.g. arg='foo\, bar'
848 Allows for escaping of the separator: e.g. arg='foo\, bar'
851
849
852 Note that, given the way bash et al. parse the command line, the single
850 Note that, given the way bash et al. parse the command line, the single
853 quotes around such an argument are required.
851 quotes around such an argument are required.
854 """
852 """
855 escaped_sep = r'\%s' % sep
853 escaped_sep = r'\%s' % sep
856
854
857 if escaped_sep not in text:
855 if escaped_sep not in text:
858 return text.split(sep, maxsplit)
856 return text.split(sep, maxsplit)
859
857
860 before, _mid, after = text.partition(escaped_sep)
858 before, _mid, after = text.partition(escaped_sep)
861 startlist = before.split(sep, maxsplit) # a regular split is fine here
859 startlist = before.split(sep, maxsplit) # a regular split is fine here
862 unfinished = startlist[-1]
860 unfinished = startlist[-1]
863 startlist = startlist[:-1]
861 startlist = startlist[:-1]
864
862
865 # recurse because there may be more escaped separators
863 # recurse because there may be more escaped separators
866 endlist = escape_split(after, sep, maxsplit)
864 endlist = escape_split(after, sep, maxsplit)
867
865
868 # finish building the escaped value. we use endlist[0] because the first
866 # finish building the escaped value. we use endlist[0] because the first
869 # part of the string sent in recursion is the rest of the escaped value.
867 # part of the string sent in recursion is the rest of the escaped value.
870 unfinished += sep + endlist[0]
868 unfinished += sep + endlist[0]
871
869
872 return startlist + [unfinished] + endlist[1:] # put together all the parts
870 return startlist + [unfinished] + endlist[1:] # put together all the parts
873
871
874
872
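A few assumed examples of the escaping behaviour (illustration only, derived from the function above):

    escape_split('foo, bar')    # ['foo', ' bar']
    escape_split(r'foo\, bar')  # ['foo, bar']  - escaped separator is kept
    escape_split(r'a\,b,c')     # ['a,b', 'c']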
875 class OptionalAttr(object):
873 class OptionalAttr(object):
876 """
874 """
877 Special Optional Option that defines other attribute. Example::
875 Special Optional Option that defines other attribute. Example::
878
876
879 def test(apiuser, userid=Optional(OAttr('apiuser'))):
877 def test(apiuser, userid=Optional(OAttr('apiuser'))):
880 user = Optional.extract(userid)
878 user = Optional.extract(userid)
881 # calls
879 # calls
882
880
883 """
881 """
884
882
885 def __init__(self, attr_name):
883 def __init__(self, attr_name):
886 self.attr_name = attr_name
884 self.attr_name = attr_name
887
885
888 def __repr__(self):
886 def __repr__(self):
889 return '<OptionalAttr:%s>' % self.attr_name
887 return '<OptionalAttr:%s>' % self.attr_name
890
888
891 def __call__(self):
889 def __call__(self):
892 return self
890 return self
893
891
894
892
895 # alias
893 # alias
896 OAttr = OptionalAttr
894 OAttr = OptionalAttr
897
895
898
896
899 class Optional(object):
897 class Optional(object):
900 """
898 """
901 Defines an optional parameter::
899 Defines an optional parameter::
902
900
903 param = param.getval() if isinstance(param, Optional) else param
901 param = param.getval() if isinstance(param, Optional) else param
904 param = param() if isinstance(param, Optional) else param
902 param = param() if isinstance(param, Optional) else param
905
903
906 is equivalent to::
904 is equivalent to::
907
905
908 param = Optional.extract(param)
906 param = Optional.extract(param)
909
907
910 """
908 """
911
909
912 def __init__(self, type_):
910 def __init__(self, type_):
913 self.type_ = type_
911 self.type_ = type_
914
912
915 def __repr__(self):
913 def __repr__(self):
916 return '<Optional:%s>' % self.type_.__repr__()
914 return '<Optional:%s>' % self.type_.__repr__()
917
915
918 def __call__(self):
916 def __call__(self):
919 return self.getval()
917 return self.getval()
920
918
921 def getval(self):
919 def getval(self):
922 """
920 """
923 returns value from this Optional instance
921 returns value from this Optional instance
924 """
922 """
925 if isinstance(self.type_, OAttr):
923 if isinstance(self.type_, OAttr):
926 # use params name
924 # use params name
927 return self.type_.attr_name
925 return self.type_.attr_name
928 return self.type_
926 return self.type_
929
927
930 @classmethod
928 @classmethod
931 def extract(cls, val):
929 def extract(cls, val):
932 """
930 """
933 Extracts value from Optional() instance
931 Extracts value from Optional() instance
934
932
935 :param val:
933 :param val:
936 :return: original value if it's not Optional instance else
934 :return: original value if it's not Optional instance else
937 value of instance
935 value of instance
938 """
936 """
939 if isinstance(val, cls):
937 if isinstance(val, cls):
940 return val.getval()
938 return val.getval()
941 return val
939 return val
942
940
943
941
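A short sketch (assumed, for illustration) of how Optional and OAttr interact through Optional.extract:

    userid = Optional(OAttr('apiuser'))
    Optional.extract(userid)               # 'apiuser' - the attribute name is returned
    Optional.extract(Optional('default'))  # 'default'
    Optional.extract(42)                   # 42 - plain values pass through unchanged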
944 def get_routes_generator_for_server_url(server_url):
945 parsed_url = urlobject.URLObject(server_url)
946 netloc = safe_str(parsed_url.netloc)
947 script_name = safe_str(parsed_url.path)
948
949 if ':' in netloc:
950 server_name, server_port = netloc.split(':')
951 else:
952 server_name = netloc
953 server_port = (parsed_url.scheme == 'https' and '443' or '80')
954
955 environ = {
956 'REQUEST_METHOD': 'GET',
957 'PATH_INFO': '/',
958 'SERVER_NAME': server_name,
959 'SERVER_PORT': server_port,
960 'SCRIPT_NAME': script_name,
961 }
962 if parsed_url.scheme == 'https':
963 environ['HTTPS'] = 'on'
964 environ['wsgi.url_scheme'] = 'https'
965
966 return routes.util.URLGenerator(rhodecode.CONFIG['routes.map'], environ)
967
968
969 def glob2re(pat):
942 def glob2re(pat):
970 """
943 """
971 Translate a shell PATTERN to a regular expression.
944 Translate a shell PATTERN to a regular expression.
972
945
973 There is no way to quote meta-characters.
946 There is no way to quote meta-characters.
974 """
947 """
975
948
976 i, n = 0, len(pat)
949 i, n = 0, len(pat)
977 res = ''
950 res = ''
978 while i < n:
951 while i < n:
979 c = pat[i]
952 c = pat[i]
980 i = i+1
953 i = i+1
981 if c == '*':
954 if c == '*':
982 #res = res + '.*'
955 #res = res + '.*'
983 res = res + '[^/]*'
956 res = res + '[^/]*'
984 elif c == '?':
957 elif c == '?':
985 #res = res + '.'
958 #res = res + '.'
986 res = res + '[^/]'
959 res = res + '[^/]'
987 elif c == '[':
960 elif c == '[':
988 j = i
961 j = i
989 if j < n and pat[j] == '!':
962 if j < n and pat[j] == '!':
990 j = j+1
963 j = j+1
991 if j < n and pat[j] == ']':
964 if j < n and pat[j] == ']':
992 j = j+1
965 j = j+1
993 while j < n and pat[j] != ']':
966 while j < n and pat[j] != ']':
994 j = j+1
967 j = j+1
995 if j >= n:
968 if j >= n:
996 res = res + '\\['
969 res = res + '\\['
997 else:
970 else:
998 stuff = pat[i:j].replace('\\','\\\\')
971 stuff = pat[i:j].replace('\\','\\\\')
999 i = j+1
972 i = j+1
1000 if stuff[0] == '!':
973 if stuff[0] == '!':
1001 stuff = '^' + stuff[1:]
974 stuff = '^' + stuff[1:]
1002 elif stuff[0] == '^':
975 elif stuff[0] == '^':
1003 stuff = '\\' + stuff
976 stuff = '\\' + stuff
1004 res = '%s[%s]' % (res, stuff)
977 res = '%s[%s]' % (res, stuff)
1005 else:
978 else:
1006 res = res + re.escape(c)
979 res = res + re.escape(c)
1007 return res + '\Z(?ms)'
980 return res + '\Z(?ms)'
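An assumed illustration of the translation (valid under Python 2, which this codebase targets; newer Python versions reject the trailing inline flags):

    import re
    pattern = re.compile(glob2re('*.py'))   # -> r'[^/]*\.py\Z(?ms)'
    bool(pattern.match('setup.py'))         # True
    bool(pattern.match('pkg/setup.py'))     # False - '*' does not cross '/'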
@@ -1,450 +1,456 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import multiprocessing
22 import multiprocessing
23 import os
23 import os
24
24
25 import mock
25 import mock
26 import py
26 import py
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib import caching_query
29 from rhodecode.lib import caching_query
30 from rhodecode.lib import utils
30 from rhodecode.lib import utils
31 from rhodecode.lib.utils2 import md5
31 from rhodecode.lib.utils2 import md5
32 from rhodecode.model import settings
32 from rhodecode.model import settings
33 from rhodecode.model import db
33 from rhodecode.model import db
34 from rhodecode.model import meta
34 from rhodecode.model import meta
35 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.repo import RepoModel
36 from rhodecode.model.repo_group import RepoGroupModel
36 from rhodecode.model.repo_group import RepoGroupModel
37 from rhodecode.model.scm import ScmModel
37 from rhodecode.model.scm import ScmModel
38 from rhodecode.model.settings import UiSetting, SettingsModel
38 from rhodecode.model.settings import UiSetting, SettingsModel
39 from rhodecode.tests.fixture import Fixture
39 from rhodecode.tests.fixture import Fixture
40
40
41
41
42 fixture = Fixture()
42 fixture = Fixture()
43
43
44
44
45 def extract_hooks(config):
45 def extract_hooks(config):
46 """Return a dictionary with the hook entries of the given config."""
46 """Return a dictionary with the hook entries of the given config."""
47 hooks = {}
47 hooks = {}
48 config_items = config.serialize()
48 config_items = config.serialize()
49 for section, name, value in config_items:
49 for section, name, value in config_items:
50 if section != 'hooks':
50 if section != 'hooks':
51 continue
51 continue
52 hooks[name] = value
52 hooks[name] = value
53
53
54 return hooks
54 return hooks
55
55
56
56
57 def disable_hooks(request, hooks):
57 def disable_hooks(request, hooks):
58 """Disables the given hooks from the UI settings."""
58 """Disables the given hooks from the UI settings."""
59 session = meta.Session()
59 session = meta.Session()
60
60
61 model = SettingsModel()
61 model = SettingsModel()
62 for hook_key in hooks:
62 for hook_key in hooks:
63 sett = model.get_ui_by_key(hook_key)
63 sett = model.get_ui_by_key(hook_key)
64 sett.ui_active = False
64 sett.ui_active = False
65 session.add(sett)
65 session.add(sett)
66
66
67 # Invalidate cache
67 # Invalidate cache
68 ui_settings = session.query(db.RhodeCodeUi).options(
68 ui_settings = session.query(db.RhodeCodeUi).options(
69 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
69 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
70 ui_settings.invalidate()
70 ui_settings.invalidate()
71
71
72 ui_settings = session.query(db.RhodeCodeUi).options(
72 ui_settings = session.query(db.RhodeCodeUi).options(
73 caching_query.FromCache(
73 caching_query.FromCache(
74 'sql_cache_short', 'get_hook_settings', 'get_hook_settings'))
74 'sql_cache_short', 'get_hook_settings', 'get_hook_settings'))
75 ui_settings.invalidate()
75 ui_settings.invalidate()
76
76
77 @request.addfinalizer
77 @request.addfinalizer
78 def rollback():
78 def rollback():
79 session.rollback()
79 session.rollback()
80
80
81
81
82 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
82 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
83 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
83 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
84 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
84 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
85 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
85 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
86 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
86 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
87 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
87 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
88 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
88 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
89
89
90 HG_HOOKS = frozenset(
90 HG_HOOKS = frozenset(
91 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
91 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
92 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
92 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
93
93
94
94
95 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
95 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
96 ([], HG_HOOKS),
96 ([], HG_HOOKS),
97 (HG_HOOKS, []),
97 (HG_HOOKS, []),
98
98
99 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
99 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
100
100
101 # When a pull/push hook is disabled, its pre-pull/push counterpart should
101 # When a pull/push hook is disabled, its pre-pull/push counterpart should
102 # be disabled too.
102 # be disabled too.
103 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
103 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
104 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
104 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
105 HOOK_PUSH_KEY]),
105 HOOK_PUSH_KEY]),
106 ])
106 ])
107 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
107 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
108 expected_hooks):
108 expected_hooks):
109 disable_hooks(request, disabled_hooks)
109 disable_hooks(request, disabled_hooks)
110
110
111 config = utils.make_db_config()
111 config = utils.make_db_config()
112 hooks = extract_hooks(config)
112 hooks = extract_hooks(config)
113
113
114 assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
114 assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
115
115
116
116
117 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
117 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
118 ([], ['pull', 'push']),
118 ([], ['pull', 'push']),
119 ([HOOK_PUSH], ['pull']),
119 ([HOOK_PUSH], ['pull']),
120 ([HOOK_PULL], ['push']),
120 ([HOOK_PULL], ['push']),
121 ([HOOK_PULL, HOOK_PUSH], []),
121 ([HOOK_PULL, HOOK_PUSH], []),
122 ])
122 ])
123 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
123 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
124 hook_keys = (HOOK_PUSH, HOOK_PULL)
124 hook_keys = (HOOK_PUSH, HOOK_PULL)
125 ui_settings = [
125 ui_settings = [
126 ('hooks', key, 'some value', key not in disabled_hooks)
126 ('hooks', key, 'some value', key not in disabled_hooks)
127 for key in hook_keys]
127 for key in hook_keys]
128
128
129 result = utils.get_enabled_hook_classes(ui_settings)
129 result = utils.get_enabled_hook_classes(ui_settings)
130 assert sorted(result) == expected_hooks
130 assert sorted(result) == expected_hooks
131
131
132
132
133 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
133 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
134 _stub_git_repo(tmpdir.ensure('repo', dir=True))
134 _stub_git_repo(tmpdir.ensure('repo', dir=True))
135 repos = list(utils.get_filesystem_repos(str(tmpdir)))
135 repos = list(utils.get_filesystem_repos(str(tmpdir)))
136 assert repos == [('repo', ('git', tmpdir.join('repo')))]
136 assert repos == [('repo', ('git', tmpdir.join('repo')))]
137
137
138
138
139 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
139 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
140 tmpdir.ensure('not-a-repo', dir=True)
140 tmpdir.ensure('not-a-repo', dir=True)
141 repos = list(utils.get_filesystem_repos(str(tmpdir)))
141 repos = list(utils.get_filesystem_repos(str(tmpdir)))
142 assert repos == []
142 assert repos == []
143
143
144
144
145 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
145 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
146 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
146 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
147 repos = list(utils.get_filesystem_repos(str(tmpdir)))
147 repos = list(utils.get_filesystem_repos(str(tmpdir)))
148 assert repos == []
148 assert repos == []
149
149
150
150
151 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
151 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
152 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
152 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
153 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
153 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
154 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
154 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
155
155
156
156
157 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
157 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
158 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
158 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
159 repos = list(utils.get_filesystem_repos(str(tmpdir)))
159 repos = list(utils.get_filesystem_repos(str(tmpdir)))
160 assert repos == []
160 assert repos == []
161
161
162
162
163 def test_get_filesystem_repos_skips_files(tmpdir):
163 def test_get_filesystem_repos_skips_files(tmpdir):
164 tmpdir.ensure('test-file')
164 tmpdir.ensure('test-file')
165 repos = list(utils.get_filesystem_repos(str(tmpdir)))
165 repos = list(utils.get_filesystem_repos(str(tmpdir)))
166 assert repos == []
166 assert repos == []
167
167
168
168
169 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
169 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
170 removed_repo_name = 'rm__00000000_000000_000000__.stub'
170 removed_repo_name = 'rm__00000000_000000_000000__.stub'
171 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
171 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
172 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
172 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
173 repos = list(utils.get_filesystem_repos(str(tmpdir)))
173 repos = list(utils.get_filesystem_repos(str(tmpdir)))
174 assert repos == []
174 assert repos == []
175
175
176
176
177 def _stub_git_repo(repo_path):
177 def _stub_git_repo(repo_path):
178 """
178 """
179 Make `repo_path` look like a Git repository.
179 Make `repo_path` look like a Git repository.
180 """
180 """
181 repo_path.ensure('.git', dir=True)
181 repo_path.ensure('.git', dir=True)
182
182
183
183
184 @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode'])
184 @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode'])
185 def test_get_dirpaths_returns_all_paths(tmpdir, str_class):
185 def test_get_dirpaths_returns_all_paths(tmpdir, str_class):
186 tmpdir.ensure('test-file')
186 tmpdir.ensure('test-file')
187 dirpaths = utils._get_dirpaths(str_class(tmpdir))
187 dirpaths = utils._get_dirpaths(str_class(tmpdir))
188 assert dirpaths == ['test-file']
188 assert dirpaths == ['test-file']
189
189
190
190
191 def test_get_dirpaths_returns_all_paths_bytes(
191 def test_get_dirpaths_returns_all_paths_bytes(
192 tmpdir, platform_encodes_filenames):
192 tmpdir, platform_encodes_filenames):
193 if platform_encodes_filenames:
193 if platform_encodes_filenames:
194 pytest.skip("This platform seems to encode filenames.")
194 pytest.skip("This platform seems to encode filenames.")
195 tmpdir.ensure('repo-a-umlaut-\xe4')
195 tmpdir.ensure('repo-a-umlaut-\xe4')
196 dirpaths = utils._get_dirpaths(str(tmpdir))
196 dirpaths = utils._get_dirpaths(str(tmpdir))
197 assert dirpaths == ['repo-a-umlaut-\xe4']
197 assert dirpaths == ['repo-a-umlaut-\xe4']
198
198
199
199
200 def test_get_dirpaths_skips_paths_it_cannot_decode(
200 def test_get_dirpaths_skips_paths_it_cannot_decode(
201 tmpdir, platform_encodes_filenames):
201 tmpdir, platform_encodes_filenames):
202 if platform_encodes_filenames:
202 if platform_encodes_filenames:
203 pytest.skip("This platform seems to encode filenames.")
203 pytest.skip("This platform seems to encode filenames.")
204 path_with_latin1 = 'repo-a-umlaut-\xe4'
204 path_with_latin1 = 'repo-a-umlaut-\xe4'
205 tmpdir.ensure(path_with_latin1)
205 tmpdir.ensure(path_with_latin1)
206 dirpaths = utils._get_dirpaths(unicode(tmpdir))
206 dirpaths = utils._get_dirpaths(unicode(tmpdir))
207 assert dirpaths == []
207 assert dirpaths == []
208
208
209
209
210 @pytest.fixture(scope='session')
210 @pytest.fixture(scope='session')
211 def platform_encodes_filenames():
211 def platform_encodes_filenames():
212 """
212 """
213 Boolean indicator if the current platform changes filename encodings.
213 Boolean indicator if the current platform changes filename encodings.
214 """
214 """
215 path_with_latin1 = 'repo-a-umlaut-\xe4'
215 path_with_latin1 = 'repo-a-umlaut-\xe4'
216 tmpdir = py.path.local.mkdtemp()
216 tmpdir = py.path.local.mkdtemp()
217 tmpdir.ensure(path_with_latin1)
217 tmpdir.ensure(path_with_latin1)
218 read_path = tmpdir.listdir()[0].basename
218 read_path = tmpdir.listdir()[0].basename
219 tmpdir.remove()
219 tmpdir.remove()
220 return path_with_latin1 != read_path
220 return path_with_latin1 != read_path
221
221
222
222
223
223
224
224
225 def test_repo2db_mapper_groups(repo_groups):
225 def test_repo2db_mapper_groups(repo_groups):
226 session = meta.Session()
226 session = meta.Session()
227 zombie_group, parent_group, child_group = repo_groups
227 zombie_group, parent_group, child_group = repo_groups
228 zombie_path = os.path.join(
228 zombie_path = os.path.join(
229 RepoGroupModel().repos_path, zombie_group.full_path)
229 RepoGroupModel().repos_path, zombie_group.full_path)
230 os.rmdir(zombie_path)
230 os.rmdir(zombie_path)
231
231
232 # Avoid removing test repos when calling repo2db_mapper
232 # Avoid removing test repos when calling repo2db_mapper
233 repo_list = {
233 repo_list = {
234 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
234 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
235 }
235 }
236 utils.repo2db_mapper(repo_list, remove_obsolete=True)
236 utils.repo2db_mapper(repo_list, remove_obsolete=True)
237
237
238 groups_in_db = session.query(db.RepoGroup).all()
238 groups_in_db = session.query(db.RepoGroup).all()
239 assert child_group in groups_in_db
239 assert child_group in groups_in_db
240 assert parent_group in groups_in_db
240 assert parent_group in groups_in_db
241 assert zombie_path not in groups_in_db
241 assert zombie_path not in groups_in_db
242
242
243
243
244 def test_repo2db_mapper_enables_largefiles(backend):
244 def test_repo2db_mapper_enables_largefiles(backend):
245 repo = backend.create_repo()
245 repo = backend.create_repo()
246 repo_list = {repo.repo_name: 'test'}
246 repo_list = {repo.repo_name: 'test'}
247 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
247 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
248 with mock.patch.multiple('rhodecode.model.scm.ScmModel',
248 with mock.patch.multiple('rhodecode.model.scm.ScmModel',
249 install_git_hook=mock.DEFAULT,
249 install_git_hook=mock.DEFAULT,
250 install_svn_hooks=mock.DEFAULT):
250 install_svn_hooks=mock.DEFAULT):
251 utils.repo2db_mapper(repo_list, remove_obsolete=False)
251 utils.repo2db_mapper(repo_list, remove_obsolete=False)
252 _, kwargs = scm_mock.call_args
252 _, kwargs = scm_mock.call_args
253 assert kwargs['config'].get('extensions', 'largefiles') == ''
253 assert kwargs['config'].get('extensions', 'largefiles') == ''
254
254
255
255
256 @pytest.mark.backends("git", "svn")
256 @pytest.mark.backends("git", "svn")
257 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
257 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
258 repo = backend.create_repo()
258 repo = backend.create_repo()
259 repo_list = {repo.repo_name: 'test'}
259 repo_list = {repo.repo_name: 'test'}
260 with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock:
260 with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock:
261 utils.repo2db_mapper(repo_list, remove_obsolete=False)
261 utils.repo2db_mapper(repo_list, remove_obsolete=False)
262 install_hooks_mock.assert_called_once_with(
262 install_hooks_mock.assert_called_once_with(
263 repo.scm_instance(), repo_type=backend.alias)
263 repo.scm_instance(), repo_type=backend.alias)
264
264
265
265
266 @pytest.mark.backends("git", "svn")
266 @pytest.mark.backends("git", "svn")
267 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
267 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
268 repo = backend.create_repo()
268 repo = backend.create_repo()
269 RepoModel().delete(repo, fs_remove=False)
269 RepoModel().delete(repo, fs_remove=False)
270 meta.Session().commit()
270 meta.Session().commit()
271 repo_list = {repo.repo_name: repo.scm_instance()}
271 repo_list = {repo.repo_name: repo.scm_instance()}
272 with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock:
272 with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock:
273 utils.repo2db_mapper(repo_list, remove_obsolete=False)
273 utils.repo2db_mapper(repo_list, remove_obsolete=False)
274 assert install_hooks_mock.call_count == 1
274 assert install_hooks_mock.call_count == 1
275 install_hooks_args, _ = install_hooks_mock.call_args
275 install_hooks_args, _ = install_hooks_mock.call_args
276 assert install_hooks_args[0].name == repo.repo_name
276 assert install_hooks_args[0].name == repo.repo_name
277
277
278
278
279 class TestPasswordChanged(object):
279 class TestPasswordChanged(object):
280 def setup(self):
280 def setup(self):
281 self.session = {
281 self.session = {
282 'rhodecode_user': {
282 'rhodecode_user': {
283 'password': '0cc175b9c0f1b6a831c399e269772661'
283 'password': '0cc175b9c0f1b6a831c399e269772661'
284 }
284 }
285 }
285 }
286 self.auth_user = mock.Mock()
286 self.auth_user = mock.Mock()
287 self.auth_user.username = 'test'
287 self.auth_user.username = 'test'
288 self.auth_user.password = 'abc123'
288 self.auth_user.password = 'abc123'
289
289
290 def test_returns_false_for_default_user(self):
290 def test_returns_false_for_default_user(self):
291 self.auth_user.username = db.User.DEFAULT_USER
291 self.auth_user.username = db.User.DEFAULT_USER
292 result = utils.password_changed(self.auth_user, self.session)
292 result = utils.password_changed(self.auth_user, self.session)
293 assert result is False
293 assert result is False
294
294
295 def test_returns_false_if_password_was_not_changed(self):
295 def test_returns_false_if_password_was_not_changed(self):
296 self.session['rhodecode_user']['password'] = md5(
296 self.session['rhodecode_user']['password'] = md5(
297 self.auth_user.password)
297 self.auth_user.password)
298 result = utils.password_changed(self.auth_user, self.session)
298 result = utils.password_changed(self.auth_user, self.session)
299 assert result is False
299 assert result is False
300
300
301 def test_returns_true_if_password_was_changed(self):
301 def test_returns_true_if_password_was_changed(self):
302 result = utils.password_changed(self.auth_user, self.session)
302 result = utils.password_changed(self.auth_user, self.session)
303 assert result is True
303 assert result is True
304
304
305 def test_returns_true_if_auth_user_password_is_empty(self):
305 def test_returns_true_if_auth_user_password_is_empty(self):
306 self.auth_user.password = None
306 self.auth_user.password = None
307 result = utils.password_changed(self.auth_user, self.session)
307 result = utils.password_changed(self.auth_user, self.session)
308 assert result is True
308 assert result is True
309
309
310 def test_returns_true_if_session_password_is_empty(self):
310 def test_returns_true_if_session_password_is_empty(self):
311 self.session['rhodecode_user'].pop('password')
311 self.session['rhodecode_user'].pop('password')
312 result = utils.password_changed(self.auth_user, self.session)
312 result = utils.password_changed(self.auth_user, self.session)
313 assert result is True
313 assert result is True
314
314
315
315
316 class TestReadOpensourceLicenses(object):
316 class TestReadOpensourceLicenses(object):
317 def test_success(self):
317 def test_success(self):
318 utils._license_cache = None
318 utils._license_cache = None
319 json_data = '''
319 json_data = '''
320 {
320 {
321 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
321 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
322 "python2.7-Markdown-2.6.2": {
322 "python2.7-Markdown-2.6.2": {
323 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
323 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
324 }
324 }
325 }
325 }
326 '''
326 '''
327 resource_string_patch = mock.patch.object(
327 resource_string_patch = mock.patch.object(
328 utils.pkg_resources, 'resource_string', return_value=json_data)
328 utils.pkg_resources, 'resource_string', return_value=json_data)
329 with resource_string_patch:
329 with resource_string_patch:
330 result = utils.read_opensource_licenses()
330 result = utils.read_opensource_licenses()
331 assert result == json.loads(json_data)
331 assert result == json.loads(json_data)
332
332
333 def test_caching(self):
333 def test_caching(self):
334 utils._license_cache = {
334 utils._license_cache = {
335 "python2.7-pytest-2.7.1": {
335 "python2.7-pytest-2.7.1": {
336 "UNKNOWN": None
336 "UNKNOWN": None
337 },
337 },
338 "python2.7-Markdown-2.6.2": {
338 "python2.7-Markdown-2.6.2": {
339 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
339 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
340 }
340 }
341 }
341 }
342 resource_patch = mock.patch.object(
342 resource_patch = mock.patch.object(
343 utils.pkg_resources, 'resource_string', side_effect=Exception)
343 utils.pkg_resources, 'resource_string', side_effect=Exception)
344 json_patch = mock.patch.object(
344 json_patch = mock.patch.object(
345 utils.json, 'loads', side_effect=Exception)
345 utils.json, 'loads', side_effect=Exception)
346
346
347 with resource_patch as resource_mock, json_patch as json_mock:
347 with resource_patch as resource_mock, json_patch as json_mock:
348 result = utils.read_opensource_licenses()
348 result = utils.read_opensource_licenses()
349
349
350 assert resource_mock.call_count == 0
350 assert resource_mock.call_count == 0
351 assert json_mock.call_count == 0
351 assert json_mock.call_count == 0
352 assert result == utils._license_cache
352 assert result == utils._license_cache
353
353
354 def test_licenses_file_contains_no_unknown_licenses(self):
354 def test_licenses_file_contains_no_unknown_licenses(self):
355 utils._license_cache = None
355 utils._license_cache = None
356 result = utils.read_opensource_licenses()
356 result = utils.read_opensource_licenses()
357 license_names = []
357 license_names = []
358 for licenses in result.values():
358 for licenses in result.values():
359 license_names.extend(licenses.keys())
359 license_names.extend(licenses.keys())
360 assert 'UNKNOWN' not in license_names
360 assert 'UNKNOWN' not in license_names
361
361
362
362
363 class TestMakeDbConfig(object):
363 class TestMakeDbConfig(object):
364 def test_data_from_config_data_from_db_returned(self):
364 def test_data_from_config_data_from_db_returned(self):
365 test_data = [
365 test_data = [
366 ('section1', 'option1', 'value1'),
366 ('section1', 'option1', 'value1'),
367 ('section2', 'option2', 'value2'),
367 ('section2', 'option2', 'value2'),
368 ('section3', 'option3', 'value3'),
368 ('section3', 'option3', 'value3'),
369 ]
369 ]
370 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
370 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
371 config_mock.return_value = test_data
371 config_mock.return_value = test_data
372 kwargs = {'clear_session': False, 'repo': 'test_repo'}
372 kwargs = {'clear_session': False, 'repo': 'test_repo'}
373 result = utils.make_db_config(**kwargs)
373 result = utils.make_db_config(**kwargs)
374 config_mock.assert_called_once_with(**kwargs)
374 config_mock.assert_called_once_with(**kwargs)
375 for section, option, expected_value in test_data:
375 for section, option, expected_value in test_data:
376 value = result.get(section, option)
376 value = result.get(section, option)
377 assert value == expected_value
377 assert value == expected_value
378
378
379
379
380 class TestConfigDataFromDb(object):
380 class TestConfigDataFromDb(object):
381 def test_config_data_from_db_returns_active_settings(self):
381 def test_config_data_from_db_returns_active_settings(self):
382 test_data = [
382 test_data = [
383 UiSetting('section1', 'option1', 'value1', True),
383 UiSetting('section1', 'option1', 'value1', True),
384 UiSetting('section2', 'option2', 'value2', True),
384 UiSetting('section2', 'option2', 'value2', True),
385 UiSetting('section3', 'option3', 'value3', False),
385 UiSetting('section3', 'option3', 'value3', False),
386 ]
386 ]
387 repo_name = 'test_repo'
387 repo_name = 'test_repo'
388
388
389 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
389 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
390 hooks_patch = mock.patch.object(
390 hooks_patch = mock.patch.object(
391 utils, 'get_enabled_hook_classes',
391 utils, 'get_enabled_hook_classes',
392 return_value=['pull', 'push', 'repo_size'])
392 return_value=['pull', 'push', 'repo_size'])
393 with model_patch as model_mock, hooks_patch:
393 with model_patch as model_mock, hooks_patch:
394 instance_mock = mock.Mock()
394 instance_mock = mock.Mock()
395 model_mock.return_value = instance_mock
395 model_mock.return_value = instance_mock
396 instance_mock.get_ui_settings.return_value = test_data
396 instance_mock.get_ui_settings.return_value = test_data
397 result = utils.config_data_from_db(
397 result = utils.config_data_from_db(
398 clear_session=False, repo=repo_name)
398 clear_session=False, repo=repo_name)
399
399
400 self._assert_repo_name_passed(model_mock, repo_name)
400 self._assert_repo_name_passed(model_mock, repo_name)
401
401
402 expected_result = [
402 expected_result = [
403 ('section1', 'option1', 'value1'),
403 ('section1', 'option1', 'value1'),
404 ('section2', 'option2', 'value2'),
404 ('section2', 'option2', 'value2'),
405 ]
405 ]
406 assert result == expected_result
406 assert result == expected_result
407
407
408 def _assert_repo_name_passed(self, model_mock, repo_name):
408 def _assert_repo_name_passed(self, model_mock, repo_name):
409 assert model_mock.call_count == 1
409 assert model_mock.call_count == 1
410 call_args, call_kwargs = model_mock.call_args
410 call_args, call_kwargs = model_mock.call_args
411 assert call_kwargs['repo'] == repo_name
411 assert call_kwargs['repo'] == repo_name
412
412
413
413
414 class TestIsDirWritable(object):
414 class TestIsDirWritable(object):
415 def test_returns_false_when_not_writable(self):
415 def test_returns_false_when_not_writable(self):
416 with mock.patch('__builtin__.open', side_effect=OSError):
416 with mock.patch('__builtin__.open', side_effect=OSError):
417 assert not utils._is_dir_writable('/stub-path')
417 assert not utils._is_dir_writable('/stub-path')
418
418
419 def test_returns_true_when_writable(self, tmpdir):
419 def test_returns_true_when_writable(self, tmpdir):
420 assert utils._is_dir_writable(str(tmpdir))
420 assert utils._is_dir_writable(str(tmpdir))
421
421
422 def test_is_safe_against_race_conditions(self, tmpdir):
422 def test_is_safe_against_race_conditions(self, tmpdir):
423 workers = multiprocessing.Pool()
423 workers = multiprocessing.Pool()
424 directories = [str(tmpdir)] * 10
424 directories = [str(tmpdir)] * 10
425 workers.map(utils._is_dir_writable, directories)
425 workers.map(utils._is_dir_writable, directories)
426
426
427
427
428 class TestGetEnabledHooks(object):
428 class TestGetEnabledHooks(object):
429 def test_only_active_hooks_are_enabled(self):
429 def test_only_active_hooks_are_enabled(self):
430 ui_settings = [
430 ui_settings = [
431 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
431 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
432 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
432 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
433 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
433 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
434 ]
434 ]
435 result = utils.get_enabled_hook_classes(ui_settings)
435 result = utils.get_enabled_hook_classes(ui_settings)
436 assert result == ['push', 'repo_size']
436 assert result == ['push', 'repo_size']
437
437
438 def test_all_hooks_are_enabled(self):
438 def test_all_hooks_are_enabled(self):
439 ui_settings = [
439 ui_settings = [
440 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
440 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
441 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
441 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
442 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
442 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
443 ]
443 ]
444 result = utils.get_enabled_hook_classes(ui_settings)
444 result = utils.get_enabled_hook_classes(ui_settings)
445 assert result == ['push', 'repo_size', 'pull']
445 assert result == ['push', 'repo_size', 'pull']
446
446
447 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
447 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
448 ui_settings = []
448 ui_settings = []
449 result = utils.get_enabled_hook_classes(ui_settings)
449 result = utils.get_enabled_hook_classes(ui_settings)
450 assert result == []
450 assert result == []
451
452
453 def test_obfuscate_url_pw():
454 from rhodecode.lib.utils2 import obfuscate_url_pw
455 engine = u'/home/repos/malmö'
456 assert obfuscate_url_pw(engine)
\ No newline at end of file
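For context, a hedged sketch of what the helper under test is expected to do with a credentialled URL (assuming the SQLAlchemy version used by this codebase, which renders the URL with the substituted password):

    obfuscate_url_pw('postgresql://user:secret@localhost/db')
    # -> u'postgresql://user:XXXXX@localhost/db'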
@@ -1,1837 +1,1836 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33
33
34 import mock
34 import mock
35 import pyramid.testing
35 import pyramid.testing
36 import pytest
36 import pytest
37 import colander
37 import colander
38 import requests
38 import requests
39
39
40 import rhodecode
40 import rhodecode
41 from rhodecode.lib.utils2 import AttributeDict
41 from rhodecode.lib.utils2 import AttributeDict
42 from rhodecode.model.changeset_status import ChangesetStatusModel
42 from rhodecode.model.changeset_status import ChangesetStatusModel
43 from rhodecode.model.comment import CommentsModel
43 from rhodecode.model.comment import CommentsModel
44 from rhodecode.model.db import (
44 from rhodecode.model.db import (
45 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
45 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
46 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
46 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
48 from rhodecode.model.pull_request import PullRequestModel
48 from rhodecode.model.pull_request import PullRequestModel
49 from rhodecode.model.repo import RepoModel
49 from rhodecode.model.repo import RepoModel
50 from rhodecode.model.repo_group import RepoGroupModel
50 from rhodecode.model.repo_group import RepoGroupModel
51 from rhodecode.model.user import UserModel
51 from rhodecode.model.user import UserModel
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.user_group import UserGroupModel
53 from rhodecode.model.user_group import UserGroupModel
54 from rhodecode.model.integration import IntegrationModel
54 from rhodecode.model.integration import IntegrationModel
55 from rhodecode.integrations import integration_type_registry
55 from rhodecode.integrations import integration_type_registry
56 from rhodecode.integrations.types.base import IntegrationTypeBase
56 from rhodecode.integrations.types.base import IntegrationTypeBase
57 from rhodecode.lib.utils import repo2db_mapper
57 from rhodecode.lib.utils import repo2db_mapper
58 from rhodecode.lib.vcs import create_vcsserver_proxy
58 from rhodecode.lib.vcs import create_vcsserver_proxy
59 from rhodecode.lib.vcs.backends import get_backend
59 from rhodecode.lib.vcs.backends import get_backend
60 from rhodecode.lib.vcs.nodes import FileNode
60 from rhodecode.lib.vcs.nodes import FileNode
61 from rhodecode.tests import (
61 from rhodecode.tests import (
62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 TEST_USER_REGULAR_PASS)
64 TEST_USER_REGULAR_PASS)
65 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
65 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
66 from rhodecode.tests.fixture import Fixture
66 from rhodecode.tests.fixture import Fixture
67
67
68
68
69 def _split_comma(value):
69 def _split_comma(value):
70 return value.split(',')
70 return value.split(',')
71
71
72
72
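For instance (assumed values), the comma-splitting type used by the options below turns a single command line argument into a list:

    _split_comma('git,hg,svn')  # ['git', 'hg', 'svn']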
73 def pytest_addoption(parser):
73 def pytest_addoption(parser):
74 parser.addoption(
74 parser.addoption(
75 '--keep-tmp-path', action='store_true',
75 '--keep-tmp-path', action='store_true',
76 help="Keep the test temporary directories")
76 help="Keep the test temporary directories")
77 parser.addoption(
77 parser.addoption(
78 '--backends', action='store', type=_split_comma,
78 '--backends', action='store', type=_split_comma,
79 default=['git', 'hg', 'svn'],
79 default=['git', 'hg', 'svn'],
80 help="Select which backends to test for backend specific tests.")
80 help="Select which backends to test for backend specific tests.")
81 parser.addoption(
81 parser.addoption(
82 '--dbs', action='store', type=_split_comma,
82 '--dbs', action='store', type=_split_comma,
83 default=['sqlite'],
83 default=['sqlite'],
84 help="Select which database to test for database specific tests. "
84 help="Select which database to test for database specific tests. "
85 "Possible options are sqlite,postgres,mysql")
85 "Possible options are sqlite,postgres,mysql")
86 parser.addoption(
86 parser.addoption(
87 '--appenlight', '--ae', action='store_true',
87 '--appenlight', '--ae', action='store_true',
88 help="Track statistics in appenlight.")
88 help="Track statistics in appenlight.")
89 parser.addoption(
89 parser.addoption(
90 '--appenlight-api-key', '--ae-key',
90 '--appenlight-api-key', '--ae-key',
91 help="API key for Appenlight.")
91 help="API key for Appenlight.")
92 parser.addoption(
92 parser.addoption(
93 '--appenlight-url', '--ae-url',
93 '--appenlight-url', '--ae-url',
94 default="https://ae.rhodecode.com",
94 default="https://ae.rhodecode.com",
95 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
95 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
96 parser.addoption(
96 parser.addoption(
97 '--sqlite-connection-string', action='store',
97 '--sqlite-connection-string', action='store',
98 default='', help="Connection string for the dbs tests with SQLite")
98 default='', help="Connection string for the dbs tests with SQLite")
99 parser.addoption(
99 parser.addoption(
100 '--postgres-connection-string', action='store',
100 '--postgres-connection-string', action='store',
101 default='', help="Connection string for the dbs tests with Postgres")
101 default='', help="Connection string for the dbs tests with Postgres")
102 parser.addoption(
102 parser.addoption(
103 '--mysql-connection-string', action='store',
103 '--mysql-connection-string', action='store',
104 default='', help="Connection string for the dbs tests with MySQL")
104 default='', help="Connection string for the dbs tests with MySQL")
105 parser.addoption(
105 parser.addoption(
106 '--repeat', type=int, default=100,
106 '--repeat', type=int, default=100,
107 help="Number of repetitions in performance tests.")
107 help="Number of repetitions in performance tests.")
108
108
109
109
110 def pytest_configure(config):
110 def pytest_configure(config):
111 # Apply the kombu patch early on, needed for test discovery on Python 2.7.11
112 from rhodecode.config import patches
111 from rhodecode.config import patches
113
112
114
113
115 def pytest_collection_modifyitems(session, config, items):
114 def pytest_collection_modifyitems(session, config, items):
116 # nottest marked, compare nose, used for transition from nose to pytest
115 # nottest marked, compare nose, used for transition from nose to pytest
117 remaining = [
116 remaining = [
118 i for i in items if getattr(i.obj, '__test__', True)]
117 i for i in items if getattr(i.obj, '__test__', True)]
119 items[:] = remaining
118 items[:] = remaining
120
119
121
120
122 def pytest_generate_tests(metafunc):
121 def pytest_generate_tests(metafunc):
123 # Support test generation based on --backend parameter
122 # Support test generation based on --backend parameter
124 if 'backend_alias' in metafunc.fixturenames:
123 if 'backend_alias' in metafunc.fixturenames:
125 backends = get_backends_from_metafunc(metafunc)
124 backends = get_backends_from_metafunc(metafunc)
126 scope = None
125 scope = None
127 if not backends:
126 if not backends:
128 pytest.skip("Not enabled for any of selected backends")
127 pytest.skip("Not enabled for any of selected backends")
129 metafunc.parametrize('backend_alias', backends, scope=scope)
128 metafunc.parametrize('backend_alias', backends, scope=scope)
130 elif hasattr(metafunc.function, 'backends'):
129 elif hasattr(metafunc.function, 'backends'):
131 backends = get_backends_from_metafunc(metafunc)
130 backends = get_backends_from_metafunc(metafunc)
132 if not backends:
131 if not backends:
133 pytest.skip("Not enabled for any of selected backends")
132 pytest.skip("Not enabled for any of selected backends")
134
133
135
134
136 def get_backends_from_metafunc(metafunc):
135 def get_backends_from_metafunc(metafunc):
137 requested_backends = set(metafunc.config.getoption('--backends'))
136 requested_backends = set(metafunc.config.getoption('--backends'))
138 if hasattr(metafunc.function, 'backends'):
137 if hasattr(metafunc.function, 'backends'):
139 # Supported backends by this test function, created from
138 # Supported backends by this test function, created from
140 # pytest.mark.backends
139 # pytest.mark.backends
141 backends = metafunc.function.backends.args
140 backends = metafunc.function.backends.args
142 elif hasattr(metafunc.cls, 'backend_alias'):
141 elif hasattr(metafunc.cls, 'backend_alias'):
143 # Support class attribute "backend_alias", this is mainly
142 # Support class attribute "backend_alias", this is mainly
144 # for legacy reasons for tests not yet using pytest.mark.backends
143 # for legacy reasons for tests not yet using pytest.mark.backends
145 backends = [metafunc.cls.backend_alias]
144 backends = [metafunc.cls.backend_alias]
146 else:
145 else:
147 backends = metafunc.config.getoption('--backends')
146 backends = metafunc.config.getoption('--backends')
148 return requested_backends.intersection(backends)
147 return requested_backends.intersection(backends)
149
148
150
149
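A hedged sketch of how a test module is expected to consume this machinery, based on the pytest.mark.backends usage visible elsewhere in this changeset (the test name and body are illustrative):

    @pytest.mark.backends("git", "hg")
    def test_something_backend_specific(backend_alias):
        # parametrized only over backends that are both marked here
        # and enabled via the --backends command line option
        assert backend_alias in ("git", "hg")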
151 @pytest.fixture(scope='session', autouse=True)
150 @pytest.fixture(scope='session', autouse=True)
152 def activate_example_rcextensions(request):
151 def activate_example_rcextensions(request):
153 """
152 """
154 Patch in an example rcextensions module which verifies passed in kwargs.
153 Patch in an example rcextensions module which verifies passed in kwargs.
155 """
154 """
156 from rhodecode.tests.other import example_rcextensions
155 from rhodecode.tests.other import example_rcextensions
157
156
158 old_extensions = rhodecode.EXTENSIONS
157 old_extensions = rhodecode.EXTENSIONS
159 rhodecode.EXTENSIONS = example_rcextensions
158 rhodecode.EXTENSIONS = example_rcextensions
160
159
161 @request.addfinalizer
160 @request.addfinalizer
162 def cleanup():
161 def cleanup():
163 rhodecode.EXTENSIONS = old_extensions
162 rhodecode.EXTENSIONS = old_extensions
164
163
165
164
166 @pytest.fixture
165 @pytest.fixture
167 def capture_rcextensions():
166 def capture_rcextensions():
168 """
167 """
169 Returns the recorded calls to entry points in rcextensions.
168 Returns the recorded calls to entry points in rcextensions.
170 """
169 """
171 calls = rhodecode.EXTENSIONS.calls
170 calls = rhodecode.EXTENSIONS.calls
172 calls.clear()
171 calls.clear()
173 # Note: At this moment, it is still the empty dict, but that will
172 # Note: At this moment, it is still the empty dict, but that will
174 # be filled during the test run and since it is a reference this
173 # be filled during the test run and since it is a reference this
175 # is enough to make it work.
174 # is enough to make it work.
176 return calls
175 return calls
177
176
178
177
179 @pytest.fixture(scope='session')
178 @pytest.fixture(scope='session')
180 def http_environ_session():
179 def http_environ_session():
181 """
180 """
182 Allow to use "http_environ" in session scope.
181 Allow to use "http_environ" in session scope.
183 """
182 """
184 return http_environ(
183 return http_environ(
185 http_host_stub=http_host_stub())
184 http_host_stub=http_host_stub())
186
185
187
186
188 @pytest.fixture
187 @pytest.fixture
189 def http_host_stub():
188 def http_host_stub():
190 """
189 """
191 Value of HTTP_HOST in the test run.
190 Value of HTTP_HOST in the test run.
192 """
191 """
193 return 'example.com:80'
192 return 'example.com:80'
194
193
195
194
196 @pytest.fixture
195 @pytest.fixture
197 def http_host_only_stub():
196 def http_host_only_stub():
198 """
197 """
199 Value of HTTP_HOST in the test run (host name only, without the port).
198 Value of HTTP_HOST in the test run (host name only, without the port).
200 """
199 """
201 return http_host_stub().split(':')[0]
200 return http_host_stub().split(':')[0]
202
201
203
202
204 @pytest.fixture
203 @pytest.fixture
205 def http_environ(http_host_stub):
204 def http_environ(http_host_stub):
206 """
205 """
207 HTTP extra environ keys.
206 HTTP extra environ keys.
208
207
209 Used by the test application, as well as for setting up the pylons
208 Used by the test application, as well as for setting up the pylons
210 environment. In the case of the fixture "app" it should be possible
211 to override this for a specific test case.
212 """
213 return {
214 'SERVER_NAME': http_host_only_stub(),
215 'SERVER_PORT': http_host_stub.split(':')[1],
216 'HTTP_HOST': http_host_stub,
217 'HTTP_USER_AGENT': 'rc-test-agent',
218 'REQUEST_METHOD': 'GET'
219 }
220
221
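# Illustrative sketch: overriding "http_environ" for a single test module by
# redefining the fixture (standard pytest fixture overriding; the values
# below are made up for the example).
import pytest

@pytest.fixture
def http_environ():
    return {
        'SERVER_NAME': 'custom.example.com',
        'SERVER_PORT': '8080',
        'HTTP_HOST': 'custom.example.com:8080',
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET',
    }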
222 @pytest.fixture(scope='function')
223 def app(request, config_stub, baseapp, http_environ):
224 app = CustomTestApp(
225 baseapp,
226 extra_environ=http_environ)
227 if request.cls:
228 request.cls.app = app
229 return app
230
231
232 @pytest.fixture(scope='session')
233 def app_settings(baseapp, ini_config):
234 """
235 Settings dictionary used to create the app.
236
237 Parses the ini file and passes the result through the sanitize and apply
238 defaults mechanism in `rhodecode.config.middleware`.
239 """
240 return baseapp.config.get_settings()
241
242
243 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
244
245
246 def _autologin_user(app, *args):
247 session = login_user_session(app, *args)
248 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
249 return LoginData(csrf_token, session['rhodecode_user'])
250
251
252 @pytest.fixture
253 def autologin_user(app):
254 """
255 Utility fixture which makes sure that the admin user is logged in
256 """
257 return _autologin_user(app)
258
259
260 @pytest.fixture
261 def autologin_regular_user(app):
262 """
263 Utility fixture which makes sure that the regular user is logged in
264 """
265 return _autologin_user(
266 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
267
268
269 @pytest.fixture(scope='function')
270 def csrf_token(request, autologin_user):
271 return autologin_user.csrf_token
272
273
274 @pytest.fixture(scope='function')
275 def xhr_header(request):
276 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
277
278
279 @pytest.fixture
280 def real_crypto_backend(monkeypatch):
281 """
282 Switch the production crypto backend on for this test.
283
284 During the test run the crypto backend is replaced with a faster
285 implementation based on the MD5 algorithm.
286 """
287 monkeypatch.setattr(rhodecode, 'is_test', False)
288
289
290 @pytest.fixture(scope='class')
291 def index_location(request, baseapp):
292 index_location = baseapp.config.get_settings()['search.location']
293 if request.cls:
294 request.cls.index_location = index_location
295 return index_location
296
297
298 @pytest.fixture(scope='session', autouse=True)
299 def tests_tmp_path(request):
300 """
301 Create temporary directory to be used during the test session.
302 """
303 if not os.path.exists(TESTS_TMP_PATH):
304 os.makedirs(TESTS_TMP_PATH)
305
306 if not request.config.getoption('--keep-tmp-path'):
307 @request.addfinalizer
308 def remove_tmp_path():
309 shutil.rmtree(TESTS_TMP_PATH)
310
311 return TESTS_TMP_PATH
312
313
314 @pytest.fixture
315 def test_repo_group(request):
316 """
317 Create a temporary repository group, and destroy it after
318 usage automatically
319 """
320 fixture = Fixture()
321 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
322 repo_group = fixture.create_repo_group(repogroupid)
323
324 def _cleanup():
325 fixture.destroy_repo_group(repogroupid)
326
327 request.addfinalizer(_cleanup)
328 return repo_group
329
330
331 @pytest.fixture
332 def test_user_group(request):
333 """
334 Create a temporary user group, and destroy it after
335 usage automatically
336 """
337 fixture = Fixture()
338 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
339 user_group = fixture.create_user_group(usergroupid)
340
341 def _cleanup():
342 fixture.destroy_user_group(user_group)
343
344 request.addfinalizer(_cleanup)
345 return user_group
346
347
348 @pytest.fixture(scope='session')
349 def test_repo(request):
350 container = TestRepoContainer()
351 request.addfinalizer(container._cleanup)
352 return container
353
354
355 class TestRepoContainer(object):
356 """
357 Container for test repositories which are used read only.
358
359 Repositories will be created on demand and re-used during the lifetime
360 of this object.
361
362 Usage to get the svn test repository "minimal"::
363
364 test_repo = TestRepoContainer()
365 repo = test_repo('minimal', 'svn')
366
367 """
368
369 dump_extractors = {
370 'git': utils.extract_git_repo_from_dump,
371 'hg': utils.extract_hg_repo_from_dump,
372 'svn': utils.extract_svn_repo_from_dump,
373 }
374
375 def __init__(self):
376 self._cleanup_repos = []
377 self._fixture = Fixture()
378 self._repos = {}
379
380 def __call__(self, dump_name, backend_alias, config=None):
381 key = (dump_name, backend_alias)
382 if key not in self._repos:
383 repo = self._create_repo(dump_name, backend_alias, config)
384 self._repos[key] = repo.repo_id
385 return Repository.get(self._repos[key])
386
387 def _create_repo(self, dump_name, backend_alias, config):
388 repo_name = '%s-%s' % (backend_alias, dump_name)
389 backend_class = get_backend(backend_alias)
390 dump_extractor = self.dump_extractors[backend_alias]
391 repo_path = dump_extractor(dump_name, repo_name)
392
393 vcs_repo = backend_class(repo_path, config=config)
394 repo2db_mapper({repo_name: vcs_repo})
395
396 repo = RepoModel().get_by_repo_name(repo_name)
397 self._cleanup_repos.append(repo_name)
398 return repo
399
400 def _cleanup(self):
401 for repo_name in reversed(self._cleanup_repos):
402 self._fixture.destroy_repo(repo_name)
403
404
405 @pytest.fixture
406 def backend(request, backend_alias, baseapp, test_repo):
407 """
408 Parametrized fixture which represents a single backend implementation.
409
410 It respects the option `--backends` to focus the test run on specific
411 backend implementations.
412
413 It also supports `pytest.mark.xfail_backends` to mark tests as failing
414 for specific backends. This is intended as a utility for incremental
415 development of a new backend implementation.
416 """
417 if backend_alias not in request.config.getoption('--backends'):
418 pytest.skip("Backend %s not selected." % (backend_alias, ))
419
420 utils.check_xfail_backends(request.node, backend_alias)
421 utils.check_skip_backends(request.node, backend_alias)
422
423 repo_name = 'vcs_test_%s' % (backend_alias, )
424 backend = Backend(
425 alias=backend_alias,
426 repo_name=repo_name,
427 test_name=request.node.name,
428 test_repo_container=test_repo)
429 request.addfinalizer(backend.cleanup)
430 return backend
431
432
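# Illustrative sketch (not authoritative): a hypothetical test consuming the
# parametrized `backend` fixture; the set of backends it runs against can be
# limited on the command line, e.g. `py.test --backends=git,hg`.
def test_repo_has_initial_commits(backend):
    repo = backend.create_repo(number_of_commits=2)
    assert len(repo.scm_instance().commit_ids) == 2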
433 @pytest.fixture
434 def backend_git(request, baseapp, test_repo):
435 return backend(request, 'git', baseapp, test_repo)
436
437
438 @pytest.fixture
439 def backend_hg(request, baseapp, test_repo):
440 return backend(request, 'hg', baseapp, test_repo)
441
442
443 @pytest.fixture
444 def backend_svn(request, baseapp, test_repo):
445 return backend(request, 'svn', baseapp, test_repo)
446
447
448 @pytest.fixture
449 def backend_random(backend_git):
450 """
451 Use this to express that your tests need "a backend".
452
453 A few of our tests need a backend, so that we can run the code. This
454 fixture is intended to be used for such cases. It will pick one of the
455 backends and run the tests.
456
457 The fixture `backend` would run the test multiple times for each
458 available backend which is a pure waste of time if the test is
459 independent of the backend type.
460 """
461 # TODO: johbo: Change this to pick a random backend
462 return backend_git
463
464
465 @pytest.fixture
466 def backend_stub(backend_git):
467 """
468 Use this to express that your tests need a backend stub
469
470 TODO: mikhail: Implement a real stub logic instead of returning
471 a git backend
472 """
473 return backend_git
474
475
476 @pytest.fixture
477 def repo_stub(backend_stub):
478 """
479 Use this to express that your tests need a repository stub
480 """
481 return backend_stub.create_repo()
482
483
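# Illustrative sketch: backend-independent tests can rely on the stub
# fixtures above instead of the parametrized `backend`, avoiding repeated
# runs per backend.
def test_repo_name_is_set(repo_stub):
    # repo_stub is a freshly created repository of an arbitrary (git) type
    assert repo_stub.repo_name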
484 class Backend(object):
485 """
486 Represents the test configuration for one supported backend
487
488 Provides easy access to different test repositories based on
489 `__getitem__`. Such repositories will only be created once per test
490 session.
491 """
492
493 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
494 _master_repo = None
495 _commit_ids = {}
496
497 def __init__(self, alias, repo_name, test_name, test_repo_container):
498 self.alias = alias
499 self.repo_name = repo_name
500 self._cleanup_repos = []
501 self._test_name = test_name
502 self._test_repo_container = test_repo_container
503 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
504 # Fixture will survive in the end.
505 self._fixture = Fixture()
506
507 def __getitem__(self, key):
508 return self._test_repo_container(key, self.alias)
509
510 def create_test_repo(self, key, config=None):
511 return self._test_repo_container(key, self.alias, config)
512
513 @property
514 def repo(self):
515 """
516 Returns the "current" repository. This is the vcs_test repo or the
517 last repo which has been created with `create_repo`.
518 """
519 from rhodecode.model.db import Repository
520 return Repository.get_by_repo_name(self.repo_name)
521
522 @property
523 def default_branch_name(self):
524 VcsRepository = get_backend(self.alias)
525 return VcsRepository.DEFAULT_BRANCH_NAME
526
527 @property
528 def default_head_id(self):
529 """
530 Returns the default head id of the underlying backend.
531
532 This will be the default branch name in case the backend does have a
533 default branch. In the other cases it will point to a valid head
534 which can serve as the base to create a new commit on top of it.
535 """
536 vcsrepo = self.repo.scm_instance()
537 head_id = (
538 vcsrepo.DEFAULT_BRANCH_NAME or
539 vcsrepo.commit_ids[-1])
540 return head_id
541
542 @property
543 def commit_ids(self):
544 """
545 Returns the list of commits for the last created repository
546 """
547 return self._commit_ids
548
549 def create_master_repo(self, commits):
550 """
551 Create a repository and remember it as a template.
552
553 This makes it easy to create derived repositories to construct
554 more complex scenarios for diff, compare and pull requests.
555
556 Returns a commit map which maps from commit message to raw_id.
557 """
558 self._master_repo = self.create_repo(commits=commits)
559 return self._commit_ids
560
561 def create_repo(
562 self, commits=None, number_of_commits=0, heads=None,
563 name_suffix=u'', **kwargs):
564 """
565 Create a repository and record it for later cleanup.
566
567 :param commits: Optional. A sequence of dict instances.
568 Will add a commit per entry to the new repository.
569 :param number_of_commits: Optional. If set to a number, this number of
570 commits will be added to the new repository.
571 :param heads: Optional. Can be set to a sequence of commit
572 names which shall be pulled in from the master repository.
573
574 """
575 self.repo_name = self._next_repo_name() + name_suffix
576 repo = self._fixture.create_repo(
577 self.repo_name, repo_type=self.alias, **kwargs)
578 self._cleanup_repos.append(repo.repo_name)
579
580 commits = commits or [
581 {'message': 'Commit %s of %s' % (x, self.repo_name)}
582 for x in xrange(number_of_commits)]
583 self._add_commits_to_repo(repo.scm_instance(), commits)
584 if heads:
585 self.pull_heads(repo, heads)
586
587 return repo
588
589 def pull_heads(self, repo, heads):
590 """
591 Make sure that repo contains all commits mentioned in `heads`
592 """
593 vcsmaster = self._master_repo.scm_instance()
594 vcsrepo = repo.scm_instance()
595 vcsrepo.config.clear_section('hooks')
596 commit_ids = [self._commit_ids[h] for h in heads]
597 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
598
599 def create_fork(self):
600 repo_to_fork = self.repo_name
601 self.repo_name = self._next_repo_name()
602 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
603 self._cleanup_repos.append(self.repo_name)
604 return repo
605
606 def new_repo_name(self, suffix=u''):
607 self.repo_name = self._next_repo_name() + suffix
608 self._cleanup_repos.append(self.repo_name)
609 return self.repo_name
610
611 def _next_repo_name(self):
612 return u"%s_%s" % (
613 self.invalid_repo_name.sub(u'_', self._test_name),
614 len(self._cleanup_repos))
615
616 def ensure_file(self, filename, content='Test content\n'):
617 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
618 commits = [
619 {'added': [
620 FileNode(filename, content=content),
621 ]},
622 ]
623 self._add_commits_to_repo(self.repo.scm_instance(), commits)
624
625 def enable_downloads(self):
626 repo = self.repo
627 repo.enable_downloads = True
628 Session().add(repo)
629 Session().commit()
630
631 def cleanup(self):
632 for repo_name in reversed(self._cleanup_repos):
633 self._fixture.destroy_repo(repo_name)
634
635 def _add_commits_to_repo(self, repo, commits):
636 commit_ids = _add_commits_to_repo(repo, commits)
637 if not commit_ids:
638 return
639 self._commit_ids = commit_ids
640
641 # Creating refs for Git to allow fetching them from remote repository
642 if self.alias == 'git':
643 refs = {}
644 for message in self._commit_ids:
645 # TODO: mikhail: do more special chars replacements
646 ref_name = 'refs/test-refs/{}'.format(
647 message.replace(' ', ''))
648 refs[ref_name] = self._commit_ids[message]
649 self._create_refs(repo, refs)
650
651 def _create_refs(self, repo, refs):
652 for ref_name in refs:
653 repo.set_refs(ref_name, refs[ref_name])
654
655
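# Illustrative sketch (assumptions marked): building a diverged target/source
# pair from one master repository via `create_master_repo` and `heads`,
# similar to what the pull request utilities further below do.
def test_diverged_repos(backend):
    commit_map = backend.create_master_repo([
        {'message': 'c1'},
        {'message': 'c2', 'parents': ['c1']},
    ])  # maps commit message -> raw_id
    target = backend.create_repo(heads=['c1'], name_suffix=u'-target')
    source = backend.create_repo(heads=['c2'], name_suffix=u'-source')
    # hypothetical assertion: the pulled head is known to the source repo
    assert commit_map['c2'] in source.scm_instance().commit_ids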
656 @pytest.fixture
657 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
658 """
659 Parametrized fixture which represents a single vcs backend implementation.
660
661 See the fixture `backend` for more details. This one implements the same
662 concept, but on vcs level. So it does not provide model instances etc.
663
664 Parameters are generated dynamically, see :func:`pytest_generate_tests`
665 for how this works.
666 """
667 if backend_alias not in request.config.getoption('--backends'):
668 pytest.skip("Backend %s not selected." % (backend_alias, ))
669
670 utils.check_xfail_backends(request.node, backend_alias)
671 utils.check_skip_backends(request.node, backend_alias)
672
673 repo_name = 'vcs_test_%s' % (backend_alias, )
674 repo_path = os.path.join(tests_tmp_path, repo_name)
675 backend = VcsBackend(
676 alias=backend_alias,
677 repo_path=repo_path,
678 test_name=request.node.name,
679 test_repo_container=test_repo)
680 request.addfinalizer(backend.cleanup)
681 return backend
682
683
684 @pytest.fixture
685 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
686 return vcsbackend(request, 'git', tests_tmp_path, baseapp, test_repo)
687
688
689 @pytest.fixture
690 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
691 return vcsbackend(request, 'hg', tests_tmp_path, baseapp, test_repo)
692
693
694 @pytest.fixture
695 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
696 return vcsbackend(request, 'svn', tests_tmp_path, baseapp, test_repo)
697
698
699 @pytest.fixture
700 def vcsbackend_random(vcsbackend_git):
701 """
702 Use this to express that your tests need "a vcsbackend".
703
704 The fixture `vcsbackend` would run the test multiple times for each
705 available vcs backend which is a pure waste of time if the test is
706 independent of the vcs backend type.
707 """
708 # TODO: johbo: Change this to pick a random backend
709 return vcsbackend_git
710
711
712 @pytest.fixture
713 def vcsbackend_stub(vcsbackend_git):
714 """
715 Use this to express that your test just needs a stub of a vcsbackend.
716
717 Plan is to eventually implement an in-memory stub to speed tests up.
718 """
719 return vcsbackend_git
720
721
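# Illustrative sketch: a hypothetical test working on the plain vcs layer,
# without any database/model objects involved.
def test_low_level_commits(vcsbackend):
    vcs_repo = vcsbackend.create_repo(number_of_commits=3)
    assert len(vcs_repo.commit_ids) == 3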
722 class VcsBackend(object):
723 """
724 Represents the test configuration for one supported vcs backend.
725 """
726
727 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
728
729 def __init__(self, alias, repo_path, test_name, test_repo_container):
730 self.alias = alias
731 self._repo_path = repo_path
732 self._cleanup_repos = []
733 self._test_name = test_name
734 self._test_repo_container = test_repo_container
735
736 def __getitem__(self, key):
737 return self._test_repo_container(key, self.alias).scm_instance()
738
739 @property
740 def repo(self):
741 """
742 Returns the "current" repository. This is the vcs_test repo or the last
743 repo which has been created.
744 """
745 Repository = get_backend(self.alias)
746 return Repository(self._repo_path)
747
748 @property
749 def backend(self):
750 """
751 Returns the backend implementation class.
752 """
753 return get_backend(self.alias)
754
755 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
756 repo_name = self._next_repo_name()
757 self._repo_path = get_new_dir(repo_name)
758 repo_class = get_backend(self.alias)
759 src_url = None
760 if _clone_repo:
761 src_url = _clone_repo.path
762 repo = repo_class(self._repo_path, create=True, src_url=src_url)
763 self._cleanup_repos.append(repo)
764
765 commits = commits or [
766 {'message': 'Commit %s of %s' % (x, repo_name)}
767 for x in xrange(number_of_commits)]
768 _add_commits_to_repo(repo, commits)
769 return repo
770
771 def clone_repo(self, repo):
772 return self.create_repo(_clone_repo=repo)
773
774 def cleanup(self):
775 for repo in self._cleanup_repos:
776 shutil.rmtree(repo.path)
777
778 def new_repo_path(self):
779 repo_name = self._next_repo_name()
780 self._repo_path = get_new_dir(repo_name)
781 return self._repo_path
782
783 def _next_repo_name(self):
784 return "%s_%s" % (
785 self.invalid_repo_name.sub('_', self._test_name),
786 len(self._cleanup_repos))
787
788 def add_file(self, repo, filename, content='Test content\n'):
789 imc = repo.in_memory_commit
790 imc.add(FileNode(filename, content=content))
791 imc.commit(
792 message=u'Automatic commit from vcsbackend fixture',
793 author=u'Automatic')
794
795 def ensure_file(self, filename, content='Test content\n'):
796 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
797 self.add_file(self.repo, filename, content)
798
799
800 def _add_commits_to_repo(vcs_repo, commits):
801 commit_ids = {}
802 if not commits:
803 return commit_ids
804
805 imc = vcs_repo.in_memory_commit
806 commit = None
807
808 for idx, commit in enumerate(commits):
809 message = unicode(commit.get('message', 'Commit %s' % idx))
810
811 for node in commit.get('added', []):
812 imc.add(FileNode(node.path, content=node.content))
813 for node in commit.get('changed', []):
814 imc.change(FileNode(node.path, content=node.content))
815 for node in commit.get('removed', []):
816 imc.remove(FileNode(node.path))
817
818 parents = [
819 vcs_repo.get_commit(commit_id=commit_ids[p])
820 for p in commit.get('parents', [])]
821
822 operations = ('added', 'changed', 'removed')
823 if not any((commit.get(o) for o in operations)):
824 imc.add(FileNode('file_%s' % idx, content=message))
825
826 commit = imc.commit(
827 message=message,
828 author=unicode(commit.get('author', 'Automatic')),
829 date=commit.get('date'),
830 branch=commit.get('branch'),
831 parents=parents)
832
833 commit_ids[commit.message] = commit.raw_id
834
835 return commit_ids
836
837
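# Illustrative sketch of the commit descriptors consumed above: each entry
# may define 'message', FileNode lists under 'added'/'changed'/'removed',
# 'parents' (referring to earlier commit messages), plus 'author', 'date'
# and 'branch'. The helper returns a mapping of commit message to raw_id.
example_commits = [
    {'message': 'init', 'added': [FileNode('README.rst', content='docs\n')]},
    {'message': 'update',
     'changed': [FileNode('README.rst', content='docs, updated\n')],
     'parents': ['init']},
]
# e.g. commit_ids = _add_commits_to_repo(some_vcs_repo, example_commits)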
838 @pytest.fixture
839 def reposerver(request):
840 """
841 Allows serving a backend repository
842 """
843
844 repo_server = RepoServer()
845 request.addfinalizer(repo_server.cleanup)
846 return repo_server
847
848
849 class RepoServer(object):
850 """
851 Utility to serve a local repository for the duration of a test case.
852
853 Supports only Subversion so far.
854 """
855
856 url = None
857
858 def __init__(self):
859 self._cleanup_servers = []
860
861 def serve(self, vcsrepo):
862 if vcsrepo.alias != 'svn':
863 raise TypeError("Backend %s not supported" % vcsrepo.alias)
864
865 proc = subprocess32.Popen(
866 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
867 '--root', vcsrepo.path])
868 self._cleanup_servers.append(proc)
869 self.url = 'svn://localhost'
870
871 def cleanup(self):
872 for proc in self._cleanup_servers:
873 proc.terminate()
874
875
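# Illustrative sketch: serving a Subversion test repository via svnserve for
# the duration of a test (assumes `svnserve` is available on the test host).
def test_access_over_svnserve(reposerver, vcsbackend_svn):
    vcs_repo = vcsbackend_svn.create_repo(number_of_commits=1)
    reposerver.serve(vcs_repo)
    assert reposerver.url == 'svn://localhost'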
876 @pytest.fixture
877 def pr_util(backend, request, config_stub):
878 """
879 Utility for tests of models and for functional tests around pull requests.
880
881 It gives an instance of :class:`PRTestUtility` which provides various
882 utility methods around one pull request.
883
884 This fixture uses `backend` and inherits its parameterization.
885 """
886
887 util = PRTestUtility(backend)
888 request.addfinalizer(util.cleanup)
889
890 return util
891
892
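# Illustrative sketch: a hypothetical test driving a pull request life cycle
# through the `pr_util` fixture above and the PRTestUtility helper below.
def test_pull_request_flow(pr_util):
    pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
    comment = pr_util.create_comment()
    # assumed relationship: the comment is attached to the created PR
    assert comment.pull_request == pull_request
    pr_util.close()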
893 class PRTestUtility(object):
894
895 pull_request = None
896 pull_request_id = None
897 mergeable_patcher = None
898 mergeable_mock = None
899 notification_patcher = None
900
901 def __init__(self, backend):
902 self.backend = backend
903
904 def create_pull_request(
905 self, commits=None, target_head=None, source_head=None,
906 revisions=None, approved=False, author=None, mergeable=False,
907 enable_notifications=True, name_suffix=u'', reviewers=None,
908 title=u"Test", description=u"Description"):
909 self.set_mergeable(mergeable)
910 if not enable_notifications:
911 # mock notification side effect
912 self.notification_patcher = mock.patch(
913 'rhodecode.model.notification.NotificationModel.create')
914 self.notification_patcher.start()
915
916 if not self.pull_request:
917 if not commits:
918 commits = [
919 {'message': 'c1'},
920 {'message': 'c2'},
921 {'message': 'c3'},
922 ]
923 target_head = 'c1'
924 source_head = 'c2'
925 revisions = ['c2']
926
927 self.commit_ids = self.backend.create_master_repo(commits)
928 self.target_repository = self.backend.create_repo(
929 heads=[target_head], name_suffix=name_suffix)
930 self.source_repository = self.backend.create_repo(
931 heads=[source_head], name_suffix=name_suffix)
932 self.author = author or UserModel().get_by_username(
933 TEST_USER_ADMIN_LOGIN)
934
935 model = PullRequestModel()
936 self.create_parameters = {
937 'created_by': self.author,
938 'source_repo': self.source_repository.repo_name,
939 'source_ref': self._default_branch_reference(source_head),
940 'target_repo': self.target_repository.repo_name,
941 'target_ref': self._default_branch_reference(target_head),
942 'revisions': [self.commit_ids[r] for r in revisions],
943 'reviewers': reviewers or self._get_reviewers(),
944 'title': title,
945 'description': description,
946 }
947 self.pull_request = model.create(**self.create_parameters)
948 assert model.get_versions(self.pull_request) == []
949
950 self.pull_request_id = self.pull_request.pull_request_id
951
952 if approved:
953 self.approve()
954
955 Session().add(self.pull_request)
956 Session().commit()
957
958 return self.pull_request
959
960 def approve(self):
961 self.create_status_votes(
962 ChangesetStatus.STATUS_APPROVED,
963 *self.pull_request.reviewers)
964
965 def close(self):
966 PullRequestModel().close_pull_request(self.pull_request, self.author)
967
968 def _default_branch_reference(self, commit_message):
969 reference = '%s:%s:%s' % (
970 'branch',
971 self.backend.default_branch_name,
972 self.commit_ids[commit_message])
973 return reference
974
975 def _get_reviewers(self):
976 return [
977 (TEST_USER_REGULAR_LOGIN, ['default1'], False),
978 (TEST_USER_REGULAR2_LOGIN, ['default2'], False),
979 ]
980
981 def update_source_repository(self, head=None):
982 heads = [head or 'c3']
983 self.backend.pull_heads(self.source_repository, heads=heads)
984
985 def add_one_commit(self, head=None):
986 self.update_source_repository(head=head)
987 old_commit_ids = set(self.pull_request.revisions)
988 PullRequestModel().update_commits(self.pull_request)
989 commit_ids = set(self.pull_request.revisions)
990 new_commit_ids = commit_ids - old_commit_ids
991 assert len(new_commit_ids) == 1
992 return new_commit_ids.pop()
993
994 def remove_one_commit(self):
995 assert len(self.pull_request.revisions) == 2
996 source_vcs = self.source_repository.scm_instance()
997 removed_commit_id = source_vcs.commit_ids[-1]
998
999 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1000 # remove the if once that's sorted out.
1001 if self.backend.alias == "git":
1002 kwargs = {'branch_name': self.backend.default_branch_name}
1003 else:
1004 kwargs = {}
1005 source_vcs.strip(removed_commit_id, **kwargs)
1006
1007 PullRequestModel().update_commits(self.pull_request)
1008 assert len(self.pull_request.revisions) == 1
1009 return removed_commit_id
1010
1011 def create_comment(self, linked_to=None):
1012 comment = CommentsModel().create(
1013 text=u"Test comment",
1014 repo=self.target_repository.repo_name,
1015 user=self.author,
1016 pull_request=self.pull_request)
1017 assert comment.pull_request_version_id is None
1018
1019 if linked_to:
1020 PullRequestModel()._link_comments_to_version(linked_to)
1021
1022 return comment
1023
1024 def create_inline_comment(
1025 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1026 comment = CommentsModel().create(
1027 text=u"Test comment",
1028 repo=self.target_repository.repo_name,
1029 user=self.author,
1030 line_no=line_no,
1031 f_path=file_path,
1032 pull_request=self.pull_request)
1033 assert comment.pull_request_version_id is None
1034
1035 if linked_to:
1036 PullRequestModel()._link_comments_to_version(linked_to)
1037
1038 return comment
1039
1040 def create_version_of_pull_request(self):
1041 pull_request = self.create_pull_request()
1042 version = PullRequestModel()._create_version_from_snapshot(
1043 pull_request)
1044 return version
1045
1046 def create_status_votes(self, status, *reviewers):
1047 for reviewer in reviewers:
1048 ChangesetStatusModel().set_status(
1049 repo=self.pull_request.target_repo,
1050 status=status,
1051 user=reviewer.user_id,
1052 pull_request=self.pull_request)
1053
1054 def set_mergeable(self, value):
1055 if not self.mergeable_patcher:
1056 self.mergeable_patcher = mock.patch.object(
1057 VcsSettingsModel, 'get_general_settings')
1058 self.mergeable_mock = self.mergeable_patcher.start()
1059 self.mergeable_mock.return_value = {
1060 'rhodecode_pr_merge_enabled': value}
1061
1062 def cleanup(self):
1063 # In case the source repository is already cleaned up, the pull
1064 # request will already be deleted.
1065 pull_request = PullRequest().get(self.pull_request_id)
1066 if pull_request:
1067 PullRequestModel().delete(pull_request, pull_request.author)
1068 Session().commit()
1069
1070 if self.notification_patcher:
1071 self.notification_patcher.stop()
1072
1073 if self.mergeable_patcher:
1074 self.mergeable_patcher.stop()
1075
1076
1077 @pytest.fixture
1078 def user_admin(baseapp):
1079 """
1080 Provides the default admin test user as an instance of `db.User`.
1081 """
1082 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1083 return user
1084
1085
1086 @pytest.fixture
1087 def user_regular(baseapp):
1088 """
1089 Provides the default regular test user as an instance of `db.User`.
1090 """
1091 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1092 return user
1093
1094
1095 @pytest.fixture
1096 def user_util(request, baseapp):
1097 """
1098 Provides a wired instance of `UserUtility` with integrated cleanup.
1099 """
1100 utility = UserUtility(test_name=request.node.name)
1101 request.addfinalizer(utility.cleanup)
1102 return utility
1103
1104
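# Illustrative sketch: creating throw-away users, repository groups and
# permissions; everything registered for auto cleanup is removed again by
# the finalizer wired up in the `user_util` fixture above.
def test_group_write_access(user_util):
    user = user_util.create_user()
    repo_group = user_util.create_repo_group()
    # 'group.write' is an assumed permission name for this sketch
    user_util.grant_user_permission_to_repo_group(
        repo_group, user, 'group.write')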
1105 # TODO: johbo: Split this up into utilities per domain or something similar
1106 class UserUtility(object):
1107
1108 def __init__(self, test_name="test"):
1109 self._test_name = self._sanitize_name(test_name)
1110 self.fixture = Fixture()
1111 self.repo_group_ids = []
1112 self.repos_ids = []
1113 self.user_ids = []
1114 self.user_group_ids = []
1115 self.user_repo_permission_ids = []
1116 self.user_group_repo_permission_ids = []
1117 self.user_repo_group_permission_ids = []
1118 self.user_group_repo_group_permission_ids = []
1119 self.user_user_group_permission_ids = []
1120 self.user_group_user_group_permission_ids = []
1121 self.user_permissions = []
1122
1123 def _sanitize_name(self, name):
1124 for char in ['[', ']']:
1125 name = name.replace(char, '_')
1126 return name
1127
1128 def create_repo_group(
1129 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1130 group_name = "{prefix}_repogroup_{count}".format(
1131 prefix=self._test_name,
1132 count=len(self.repo_group_ids))
1133 repo_group = self.fixture.create_repo_group(
1134 group_name, cur_user=owner)
1135 if auto_cleanup:
1136 self.repo_group_ids.append(repo_group.group_id)
1137 return repo_group
1138
1139 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1140 auto_cleanup=True, repo_type='hg'):
1141 repo_name = "{prefix}_repository_{count}".format(
1142 prefix=self._test_name,
1143 count=len(self.repos_ids))
1144
1145 repository = self.fixture.create_repo(
1146 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type)
1147 if auto_cleanup:
1148 self.repos_ids.append(repository.repo_id)
1149 return repository
1150
1151 def create_user(self, auto_cleanup=True, **kwargs):
1152 user_name = "{prefix}_user_{count}".format(
1153 prefix=self._test_name,
1154 count=len(self.user_ids))
1155 user = self.fixture.create_user(user_name, **kwargs)
1156 if auto_cleanup:
1157 self.user_ids.append(user.user_id)
1158 return user
1159
1160 def create_user_with_group(self):
1161 user = self.create_user()
1162 user_group = self.create_user_group(members=[user])
1163 return user, user_group
1164
1165 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1166 auto_cleanup=True, **kwargs):
1167 group_name = "{prefix}_usergroup_{count}".format(
1168 prefix=self._test_name,
1169 count=len(self.user_group_ids))
1170 user_group = self.fixture.create_user_group(
1171 group_name, cur_user=owner, **kwargs)
1172
1173 if auto_cleanup:
1174 self.user_group_ids.append(user_group.users_group_id)
1175 if members:
1176 for user in members:
1177 UserGroupModel().add_user_to_group(user_group, user)
1178 return user_group
1179
1180 def grant_user_permission(self, user_name, permission_name):
1181 self._inherit_default_user_permissions(user_name, False)
1182 self.user_permissions.append((user_name, permission_name))
1183
1184 def grant_user_permission_to_repo_group(
1185 self, repo_group, user, permission_name):
1186 permission = RepoGroupModel().grant_user_permission(
1187 repo_group, user, permission_name)
1188 self.user_repo_group_permission_ids.append(
1189 (repo_group.group_id, user.user_id))
1190 return permission
1191
1192 def grant_user_group_permission_to_repo_group(
1191 def grant_user_group_permission_to_repo_group(
1193 self, repo_group, user_group, permission_name):
1192 self, repo_group, user_group, permission_name):
1194 permission = RepoGroupModel().grant_user_group_permission(
1193 permission = RepoGroupModel().grant_user_group_permission(
1195 repo_group, user_group, permission_name)
1194 repo_group, user_group, permission_name)
1196 self.user_group_repo_group_permission_ids.append(
1195 self.user_group_repo_group_permission_ids.append(
1197 (repo_group.group_id, user_group.users_group_id))
1196 (repo_group.group_id, user_group.users_group_id))
1198 return permission
1197 return permission
1199
1198
1200 def grant_user_permission_to_repo(
1199 def grant_user_permission_to_repo(
1201 self, repo, user, permission_name):
1200 self, repo, user, permission_name):
1202 permission = RepoModel().grant_user_permission(
1201 permission = RepoModel().grant_user_permission(
1203 repo, user, permission_name)
1202 repo, user, permission_name)
1204 self.user_repo_permission_ids.append(
1203 self.user_repo_permission_ids.append(
1205 (repo.repo_id, user.user_id))
1204 (repo.repo_id, user.user_id))
1206 return permission
1205 return permission
1207
1206
1208 def grant_user_group_permission_to_repo(
1207 def grant_user_group_permission_to_repo(
1209 self, repo, user_group, permission_name):
1208 self, repo, user_group, permission_name):
1210 permission = RepoModel().grant_user_group_permission(
1209 permission = RepoModel().grant_user_group_permission(
1211 repo, user_group, permission_name)
1210 repo, user_group, permission_name)
1212 self.user_group_repo_permission_ids.append(
1211 self.user_group_repo_permission_ids.append(
1213 (repo.repo_id, user_group.users_group_id))
1212 (repo.repo_id, user_group.users_group_id))
1214 return permission
1213 return permission
1215
1214
1216 def grant_user_permission_to_user_group(
1215 def grant_user_permission_to_user_group(
1217 self, target_user_group, user, permission_name):
1216 self, target_user_group, user, permission_name):
1218 permission = UserGroupModel().grant_user_permission(
1217 permission = UserGroupModel().grant_user_permission(
1219 target_user_group, user, permission_name)
1218 target_user_group, user, permission_name)
1220 self.user_user_group_permission_ids.append(
1219 self.user_user_group_permission_ids.append(
1221 (target_user_group.users_group_id, user.user_id))
1220 (target_user_group.users_group_id, user.user_id))
1222 return permission
1221 return permission
1223
1222
1224 def grant_user_group_permission_to_user_group(
1223 def grant_user_group_permission_to_user_group(
1225 self, target_user_group, user_group, permission_name):
1224 self, target_user_group, user_group, permission_name):
1226 permission = UserGroupModel().grant_user_group_permission(
1225 permission = UserGroupModel().grant_user_group_permission(
1227 target_user_group, user_group, permission_name)
1226 target_user_group, user_group, permission_name)
1228 self.user_group_user_group_permission_ids.append(
1227 self.user_group_user_group_permission_ids.append(
1229 (target_user_group.users_group_id, user_group.users_group_id))
1228 (target_user_group.users_group_id, user_group.users_group_id))
1230 return permission
1229 return permission
1231
1230
1232 def revoke_user_permission(self, user_name, permission_name):
1231 def revoke_user_permission(self, user_name, permission_name):
1233 self._inherit_default_user_permissions(user_name, True)
1232 self._inherit_default_user_permissions(user_name, True)
1234 UserModel().revoke_perm(user_name, permission_name)
1233 UserModel().revoke_perm(user_name, permission_name)
1235
1234
1236 def _inherit_default_user_permissions(self, user_name, value):
1235 def _inherit_default_user_permissions(self, user_name, value):
1237 user = UserModel().get_by_username(user_name)
1236 user = UserModel().get_by_username(user_name)
1238 user.inherit_default_permissions = value
1237 user.inherit_default_permissions = value
1239 Session().add(user)
1238 Session().add(user)
1240 Session().commit()
1239 Session().commit()
1241
1240
1242 def cleanup(self):
1241 def cleanup(self):
1243 self._cleanup_permissions()
1242 self._cleanup_permissions()
1244 self._cleanup_repos()
1243 self._cleanup_repos()
1245 self._cleanup_repo_groups()
1244 self._cleanup_repo_groups()
1246 self._cleanup_user_groups()
1245 self._cleanup_user_groups()
1247 self._cleanup_users()
1246 self._cleanup_users()
1248
1247
1249 def _cleanup_permissions(self):
1248 def _cleanup_permissions(self):
1250 if self.user_permissions:
1249 if self.user_permissions:
1251 for user_name, permission_name in self.user_permissions:
1250 for user_name, permission_name in self.user_permissions:
1252 self.revoke_user_permission(user_name, permission_name)
1251 self.revoke_user_permission(user_name, permission_name)
1253
1252
1254 for permission in self.user_repo_permission_ids:
1253 for permission in self.user_repo_permission_ids:
1255 RepoModel().revoke_user_permission(*permission)
1254 RepoModel().revoke_user_permission(*permission)
1256
1255
1257 for permission in self.user_group_repo_permission_ids:
1256 for permission in self.user_group_repo_permission_ids:
1258 RepoModel().revoke_user_group_permission(*permission)
1257 RepoModel().revoke_user_group_permission(*permission)
1259
1258
1260 for permission in self.user_repo_group_permission_ids:
1259 for permission in self.user_repo_group_permission_ids:
1261 RepoGroupModel().revoke_user_permission(*permission)
1260 RepoGroupModel().revoke_user_permission(*permission)
1262
1261
1263 for permission in self.user_group_repo_group_permission_ids:
1262 for permission in self.user_group_repo_group_permission_ids:
1264 RepoGroupModel().revoke_user_group_permission(*permission)
1263 RepoGroupModel().revoke_user_group_permission(*permission)
1265
1264
1266 for permission in self.user_user_group_permission_ids:
1265 for permission in self.user_user_group_permission_ids:
1267 UserGroupModel().revoke_user_permission(*permission)
1266 UserGroupModel().revoke_user_permission(*permission)
1268
1267
1269 for permission in self.user_group_user_group_permission_ids:
1268 for permission in self.user_group_user_group_permission_ids:
1270 UserGroupModel().revoke_user_group_permission(*permission)
1269 UserGroupModel().revoke_user_group_permission(*permission)
1271
1270
1272 def _cleanup_repo_groups(self):
1271 def _cleanup_repo_groups(self):
1273 def _repo_group_compare(first_group_id, second_group_id):
1272 def _repo_group_compare(first_group_id, second_group_id):
1274 """
1273 """
1275 Gives higher priority to the groups with the most complex paths
1274 Gives higher priority to the groups with the most complex paths
1276 """
1275 """
1277 first_group = RepoGroup.get(first_group_id)
1276 first_group = RepoGroup.get(first_group_id)
1278 second_group = RepoGroup.get(second_group_id)
1277 second_group = RepoGroup.get(second_group_id)
1279 first_group_parts = (
1278 first_group_parts = (
1280 len(first_group.group_name.split('/')) if first_group else 0)
1279 len(first_group.group_name.split('/')) if first_group else 0)
1281 second_group_parts = (
1280 second_group_parts = (
1282 len(second_group.group_name.split('/')) if second_group else 0)
1281 len(second_group.group_name.split('/')) if second_group else 0)
1283 return cmp(second_group_parts, first_group_parts)
1282 return cmp(second_group_parts, first_group_parts)
1284
1283
1285 sorted_repo_group_ids = sorted(
1284 sorted_repo_group_ids = sorted(
1286 self.repo_group_ids, cmp=_repo_group_compare)
1285 self.repo_group_ids, cmp=_repo_group_compare)
1287 for repo_group_id in sorted_repo_group_ids:
1286 for repo_group_id in sorted_repo_group_ids:
1288 self.fixture.destroy_repo_group(repo_group_id)
1287 self.fixture.destroy_repo_group(repo_group_id)
1289
1288
1290 def _cleanup_repos(self):
1289 def _cleanup_repos(self):
1291 sorted_repos_ids = sorted(self.repos_ids)
1290 sorted_repos_ids = sorted(self.repos_ids)
1292 for repo_id in sorted_repos_ids:
1291 for repo_id in sorted_repos_ids:
1293 self.fixture.destroy_repo(repo_id)
1292 self.fixture.destroy_repo(repo_id)
1294
1293
1295 def _cleanup_user_groups(self):
1294 def _cleanup_user_groups(self):
1296 def _user_group_compare(first_group_id, second_group_id):
1295 def _user_group_compare(first_group_id, second_group_id):
1297 """
1296 """
1298 Gives higher priority to the groups with the most complex paths
1297 Gives higher priority to the groups with the most complex paths
1299 """
1298 """
1300 first_group = UserGroup.get(first_group_id)
1299 first_group = UserGroup.get(first_group_id)
1301 second_group = UserGroup.get(second_group_id)
1300 second_group = UserGroup.get(second_group_id)
1302 first_group_parts = (
1301 first_group_parts = (
1303 len(first_group.users_group_name.split('/'))
1302 len(first_group.users_group_name.split('/'))
1304 if first_group else 0)
1303 if first_group else 0)
1305 second_group_parts = (
1304 second_group_parts = (
1306 len(second_group.users_group_name.split('/'))
1305 len(second_group.users_group_name.split('/'))
1307 if second_group else 0)
1306 if second_group else 0)
1308 return cmp(second_group_parts, first_group_parts)
1307 return cmp(second_group_parts, first_group_parts)
1309
1308
1310 sorted_user_group_ids = sorted(
1309 sorted_user_group_ids = sorted(
1311 self.user_group_ids, cmp=_user_group_compare)
1310 self.user_group_ids, cmp=_user_group_compare)
1312 for user_group_id in sorted_user_group_ids:
1311 for user_group_id in sorted_user_group_ids:
1313 self.fixture.destroy_user_group(user_group_id)
1312 self.fixture.destroy_user_group(user_group_id)
1314
1313
1315 def _cleanup_users(self):
1314 def _cleanup_users(self):
1316 for user_id in self.user_ids:
1315 for user_id in self.user_ids:
1317 self.fixture.destroy_user(user_id)
1316 self.fixture.destroy_user(user_id)
1318
1317
1319
1318
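# Illustrative sketch (not from this changeset): the cmp-based helpers above
# sort group ids so that more deeply nested paths come first, which lets the
# cleanup destroy child groups before their parents. A standalone Python 2
# example with plain, hypothetical path strings:
def _deeper_paths_first(first_path, second_path):
    # More '/'-separated parts means a deeper group, so it should sort earlier.
    return cmp(len(second_path.split('/')), len(first_path.split('/')))

paths = ['parent', 'parent/child/grandchild', 'parent/child']
print(sorted(paths, cmp=_deeper_paths_first))
# -> ['parent/child/grandchild', 'parent/child', 'parent']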
1319 # TODO: Think about moving this into a pytest-pyro package and make it a
1320 # pytest plugin
1321 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1322 def pytest_runtest_makereport(item, call):
1323 """
1324 Adds the remote traceback if the exception has this information.
1325
1326 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1327 to the exception instance.
1328 """
1329 outcome = yield
1330 report = outcome.get_result()
1331 if call.excinfo:
1332 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1333
1334
1335 def _add_vcsserver_remote_traceback(report, exc):
1336 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1337
1338 if vcsserver_traceback:
1339 section = 'VCSServer remote traceback ' + report.when
1340 report.sections.append((section, vcsserver_traceback))
1341
1342
1343 @pytest.fixture(scope='session')
1344 def testrun():
1345 return {
1346 'uuid': uuid.uuid4(),
1347 'start': datetime.datetime.utcnow().isoformat(),
1348 'timestamp': int(time.time()),
1349 }
1350
1351
1352 @pytest.fixture(autouse=True)
1353 def collect_appenlight_stats(request, testrun):
1354 """
1355 This fixture reports memory consumption of single tests.
1356
1357 It gathers data based on `psutil` and sends them to Appenlight. The option
1358 ``--ae`` has to be used to enable this fixture and the API key for your
1359 application has to be provided in ``--ae-key``.
1360 """
1361 try:
1362 # cygwin does not yet have psutil support.
1363 import psutil
1364 except ImportError:
1365 return
1366
1367 if not request.config.getoption('--appenlight'):
1368 return
1369 else:
1370 # Only request the baseapp fixture if appenlight tracking is
1371 # enabled. This will speed up a test run of unit tests by 2 to 3
1372 # seconds if appenlight is not enabled.
1373 baseapp = request.getfuncargvalue("baseapp")
1374 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1375 client = AppenlightClient(
1376 url=url,
1377 api_key=request.config.getoption('--appenlight-api-key'),
1378 namespace=request.node.nodeid,
1379 request=str(testrun['uuid']),
1380 testrun=testrun)
1381
1382 client.collect({
1383 'message': "Starting",
1384 })
1385
1386 server_and_port = baseapp.config.get_settings()['vcs.server']
1387 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1388 server = create_vcsserver_proxy(server_and_port, protocol)
1389 with server:
1390 vcs_pid = server.get_pid()
1391 server.run_gc()
1392 vcs_process = psutil.Process(vcs_pid)
1393 mem = vcs_process.memory_info()
1394 client.tag_before('vcsserver.rss', mem.rss)
1395 client.tag_before('vcsserver.vms', mem.vms)
1396
1397 test_process = psutil.Process()
1398 mem = test_process.memory_info()
1399 client.tag_before('test.rss', mem.rss)
1400 client.tag_before('test.vms', mem.vms)
1401
1402 client.tag_before('time', time.time())
1403
1404 @request.addfinalizer
1405 def send_stats():
1406 client.tag_after('time', time.time())
1407 with server:
1408 gc_stats = server.run_gc()
1409 for tag, value in gc_stats.items():
1410 client.tag_after(tag, value)
1411 mem = vcs_process.memory_info()
1412 client.tag_after('vcsserver.rss', mem.rss)
1413 client.tag_after('vcsserver.vms', mem.vms)
1414
1415 mem = test_process.memory_info()
1416 client.tag_after('test.rss', mem.rss)
1417 client.tag_after('test.vms', mem.vms)
1418
1419 client.collect({
1420 'message': "Finished",
1421 })
1422 client.send_stats()
1423
1424 return client
1425
1426
1427 class AppenlightClient():
1428
1429 url_template = '{url}?protocol_version=0.5'
1430
1431 def __init__(
1432 self, url, api_key, add_server=True, add_timestamp=True,
1433 namespace=None, request=None, testrun=None):
1434 self.url = self.url_template.format(url=url)
1435 self.api_key = api_key
1436 self.add_server = add_server
1437 self.add_timestamp = add_timestamp
1438 self.namespace = namespace
1439 self.request = request
1440 self.server = socket.getfqdn(socket.gethostname())
1441 self.tags_before = {}
1442 self.tags_after = {}
1443 self.stats = []
1444 self.testrun = testrun or {}
1445
1446 def tag_before(self, tag, value):
1447 self.tags_before[tag] = value
1448
1449 def tag_after(self, tag, value):
1450 self.tags_after[tag] = value
1451
1452 def collect(self, data):
1453 if self.add_server:
1454 data.setdefault('server', self.server)
1455 if self.add_timestamp:
1456 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1457 if self.namespace:
1458 data.setdefault('namespace', self.namespace)
1459 if self.request:
1460 data.setdefault('request', self.request)
1461 self.stats.append(data)
1462
1463 def send_stats(self):
1464 tags = [
1465 ('testrun', self.request),
1466 ('testrun.start', self.testrun['start']),
1467 ('testrun.timestamp', self.testrun['timestamp']),
1468 ('test', self.namespace),
1469 ]
1470 for key, value in self.tags_before.items():
1471 tags.append((key + '.before', value))
1472 try:
1473 delta = self.tags_after[key] - value
1474 tags.append((key + '.delta', delta))
1475 except Exception:
1476 pass
1477 for key, value in self.tags_after.items():
1478 tags.append((key + '.after', value))
1479 self.collect({
1480 'message': "Collected tags",
1481 'tags': tags,
1482 })
1483
1484 response = requests.post(
1485 self.url,
1486 headers={
1487 'X-appenlight-api-key': self.api_key},
1488 json=self.stats,
1489 )
1490
1491 if not response.status_code == 200:
1492 pprint.pprint(self.stats)
1493 print(response.headers)
1494 print(response.text)
1495 raise Exception('Sending to appenlight failed')
1496
1497
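# Illustrative sketch (not from this changeset): using AppenlightClient directly
# outside the fixture. The URL, API key, namespace and testrun values below are
# placeholders; send_stats() issues a real HTTP POST, so this only demonstrates
# the call order (tag_before -> tag_after -> collect -> send_stats).
import datetime
import time

client = AppenlightClient(
    url='https://appenlight.example.com/api/logs',  # placeholder endpoint
    api_key='PLACEHOLDER-API-KEY',
    namespace='tests/test_example.py::test_something',
    request='00000000-0000-0000-0000-000000000000',
    testrun={'start': datetime.datetime.utcnow().isoformat(),
             'timestamp': int(time.time())})
client.tag_before('test.rss', 1000)
client.tag_after('test.rss', 1500)   # send_stats() also derives a 'test.rss.delta' of 500
client.collect({'message': "Example event"})
client.send_stats()                  # POSTs the accumulated stats; raises if not HTTP 200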
1498 @pytest.fixture
1499 def gist_util(request, baseapp):
1500 """
1501 Provides a wired instance of `GistUtility` with integrated cleanup.
1502 """
1503 utility = GistUtility()
1504 request.addfinalizer(utility.cleanup)
1505 return utility
1506
1507
1508 class GistUtility(object):
1509 def __init__(self):
1510 self.fixture = Fixture()
1511 self.gist_ids = []
1512
1513 def create_gist(self, **kwargs):
1514 gist = self.fixture.create_gist(**kwargs)
1515 self.gist_ids.append(gist.gist_id)
1516 return gist
1517
1518 def cleanup(self):
1519 for id_ in self.gist_ids:
1520 self.fixture.destroy_gists(str(id_))
1521
1522
1523 @pytest.fixture
1524 def enabled_backends(request):
1525 backends = request.config.option.backends
1526 return backends[:]
1527
1528
1529 @pytest.fixture
1530 def settings_util(request):
1531 """
1532 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1533 """
1534 utility = SettingsUtility()
1535 request.addfinalizer(utility.cleanup)
1536 return utility
1537
1538
1539 class SettingsUtility(object):
1540 def __init__(self):
1541 self.rhodecode_ui_ids = []
1542 self.rhodecode_setting_ids = []
1543 self.repo_rhodecode_ui_ids = []
1544 self.repo_rhodecode_setting_ids = []
1545
1546 def create_repo_rhodecode_ui(
1547 self, repo, section, value, key=None, active=True, cleanup=True):
1548 key = key or hashlib.sha1(
1549 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1550
1551 setting = RepoRhodeCodeUi()
1552 setting.repository_id = repo.repo_id
1553 setting.ui_section = section
1554 setting.ui_value = value
1555 setting.ui_key = key
1556 setting.ui_active = active
1557 Session().add(setting)
1558 Session().commit()
1559
1560 if cleanup:
1561 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1562 return setting
1563
1564 def create_rhodecode_ui(
1565 self, section, value, key=None, active=True, cleanup=True):
1566 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1567
1568 setting = RhodeCodeUi()
1569 setting.ui_section = section
1570 setting.ui_value = value
1571 setting.ui_key = key
1572 setting.ui_active = active
1573 Session().add(setting)
1574 Session().commit()
1575
1576 if cleanup:
1577 self.rhodecode_ui_ids.append(setting.ui_id)
1578 return setting
1579
1580 def create_repo_rhodecode_setting(
1581 self, repo, name, value, type_, cleanup=True):
1582 setting = RepoRhodeCodeSetting(
1583 repo.repo_id, key=name, val=value, type=type_)
1584 Session().add(setting)
1585 Session().commit()
1586
1587 if cleanup:
1588 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1589 return setting
1590
1591 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1592 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1593 Session().add(setting)
1594 Session().commit()
1595
1596 if cleanup:
1597 self.rhodecode_setting_ids.append(setting.app_settings_id)
1598
1599 return setting
1600
1601 def cleanup(self):
1602 for id_ in self.rhodecode_ui_ids:
1603 setting = RhodeCodeUi.get(id_)
1604 Session().delete(setting)
1605
1606 for id_ in self.rhodecode_setting_ids:
1607 setting = RhodeCodeSetting.get(id_)
1608 Session().delete(setting)
1609
1610 for id_ in self.repo_rhodecode_ui_ids:
1611 setting = RepoRhodeCodeUi.get(id_)
1612 Session().delete(setting)
1613
1614 for id_ in self.repo_rhodecode_setting_ids:
1615 setting = RepoRhodeCodeSetting.get(id_)
1616 Session().delete(setting)
1617
1618 Session().commit()
1619
1620
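# Illustrative sketch (not from this changeset): how a test might use the
# `settings_util` fixture. The section, key names, values and setting type are
# hypothetical examples; both objects are removed again by the fixture's
# cleanup finalizer.
def test_custom_settings_are_stored(settings_util):
    ui_entry = settings_util.create_rhodecode_ui(
        section='hooks', value='python:example.hook', active=False)
    setting = settings_util.create_rhodecode_setting(
        'example_flag', 'True', 'bool')
    assert ui_entry.ui_id is not None
    assert setting.app_settings_id is not None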
1621 @pytest.fixture
1622 def no_notifications(request):
1623 notification_patcher = mock.patch(
1624 'rhodecode.model.notification.NotificationModel.create')
1625 notification_patcher.start()
1626 request.addfinalizer(notification_patcher.stop)
1627
1628
1629 @pytest.fixture(scope='session')
1630 def repeat(request):
1631 """
1632 The number of repetitions is based on this fixture.
1633
1634 Slower calls may divide it by 10 or 100. It is chosen so that the
1635 tests are not too slow in our default test suite.
1636 """
1637 return request.config.getoption('--repeat')
1638
1639
1640 @pytest.fixture
1641 def rhodecode_fixtures():
1642 return Fixture()
1643
1644
1645 @pytest.fixture
1646 def context_stub():
1647 """
1648 Stub context object.
1649 """
1650 context = pyramid.testing.DummyResource()
1651 return context
1652
1653
1654 @pytest.fixture
1655 def request_stub():
1656 """
1657 Stub request object.
1658 """
1659 from rhodecode.lib.base import bootstrap_request
1660 request = bootstrap_request(scheme='https')
1661 return request
1662
1663
1664 @pytest.fixture
1665 def config_stub(request, request_stub):
1666 """
1667 Set up pyramid.testing and return the Configurator.
1668 """
1669 from rhodecode.lib.base import bootstrap_config
1670 config = bootstrap_config(request=request_stub)
1671
1672 @request.addfinalizer
1673 def cleanup():
1674 pyramid.testing.tearDown()
1675
1676 return config
1677
1678
1679 @pytest.fixture
1680 def StubIntegrationType():
1681 class _StubIntegrationType(IntegrationTypeBase):
1682 """ Test integration type class """
1683
1684 key = 'test'
1685 display_name = 'Test integration type'
1686 description = 'A test integration type for testing'
1687 icon = 'test_icon_html_image'
1688
1689 def __init__(self, settings):
1690 super(_StubIntegrationType, self).__init__(settings)
1691 self.sent_events = [] # for testing
1692
1693 def send_event(self, event):
1694 self.sent_events.append(event)
1695
1696 def settings_schema(self):
1697 class SettingsSchema(colander.Schema):
1698 test_string_field = colander.SchemaNode(
1699 colander.String(),
1700 missing=colander.required,
1701 title='test string field',
1702 )
1703 test_int_field = colander.SchemaNode(
1704 colander.Int(),
1705 title='some integer setting',
1706 )
1707 return SettingsSchema()
1708
1709
1710 integration_type_registry.register_integration_type(_StubIntegrationType)
1711 return _StubIntegrationType
1712
1713 @pytest.fixture
1714 def stub_integration_settings():
1715 return {
1716 'test_string_field': 'some data',
1717 'test_int_field': 100,
1718 }
1719
1720
1721 @pytest.fixture
1722 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1723 stub_integration_settings):
1724 integration = IntegrationModel().create(
1725 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1726 name='test repo integration',
1727 repo=repo_stub, repo_group=None, child_repos_only=None)
1728
1729 @request.addfinalizer
1730 def cleanup():
1731 IntegrationModel().delete(integration)
1732
1733 return integration
1734
1735
1736 @pytest.fixture
1737 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1738 stub_integration_settings):
1739 integration = IntegrationModel().create(
1740 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1741 name='test repogroup integration',
1742 repo=None, repo_group=test_repo_group, child_repos_only=True)
1743
1744 @request.addfinalizer
1745 def cleanup():
1746 IntegrationModel().delete(integration)
1747
1748 return integration
1749
1750
1751 @pytest.fixture
1752 def repogroup_recursive_integration_stub(request, test_repo_group,
1753 StubIntegrationType, stub_integration_settings):
1754 integration = IntegrationModel().create(
1755 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1756 name='test recursive repogroup integration',
1757 repo=None, repo_group=test_repo_group, child_repos_only=False)
1758
1759 @request.addfinalizer
1760 def cleanup():
1761 IntegrationModel().delete(integration)
1762
1763 return integration
1764
1765
1766 @pytest.fixture
1767 def global_integration_stub(request, StubIntegrationType,
1768 stub_integration_settings):
1769 integration = IntegrationModel().create(
1770 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1771 name='test global integration',
1772 repo=None, repo_group=None, child_repos_only=None)
1773
1774 @request.addfinalizer
1775 def cleanup():
1776 IntegrationModel().delete(integration)
1777
1778 return integration
1779
1780
1781 @pytest.fixture
1782 def root_repos_integration_stub(request, StubIntegrationType,
1783 stub_integration_settings):
1784 integration = IntegrationModel().create(
1785 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1786 name='test global integration',
1787 repo=None, repo_group=None, child_repos_only=True)
1788
1789 @request.addfinalizer
1790 def cleanup():
1791 IntegrationModel().delete(integration)
1792
1793 return integration
1794
1795
1796 @pytest.fixture
1797 def local_dt_to_utc():
1798 def _factory(dt):
1799 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1800 dateutil.tz.tzutc()).replace(tzinfo=None)
1801 return _factory
1802
1803
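# Illustrative sketch (not from this changeset): the factory returned by
# `local_dt_to_utc` turns a naive local datetime into a naive UTC datetime.
# The concrete offset mentioned below assumes a UTC+02:00 local timezone,
# purely as an example.
import datetime

def test_local_dt_to_utc_example(local_dt_to_utc):
    local = datetime.datetime(2017, 6, 1, 12, 0, 0)
    utc = local_dt_to_utc(local)
    # The result is always naive; with a UTC+02:00 local timezone it would be
    # 2017-06-01 10:00:00.
    assert utc.tzinfo is None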
1804 @pytest.fixture
1805 def disable_anonymous_user(request, baseapp):
1806 set_anonymous_access(False)
1807
1808 @request.addfinalizer
1809 def cleanup():
1810 set_anonymous_access(True)
1811
1812
1813 @pytest.fixture
1814 def rc_fixture(request):
1815 return Fixture()
1816
1817
1818 @pytest.fixture
1819 def repo_groups(request):
1820 fixture = Fixture()
1821
1822 session = Session()
1823 zombie_group = fixture.create_repo_group('zombie')
1824 parent_group = fixture.create_repo_group('parent')
1825 child_group = fixture.create_repo_group('parent/child')
1826 groups_in_db = session.query(RepoGroup).all()
1827 assert len(groups_in_db) == 3
1828 assert child_group.group_parent_id == parent_group.group_id
1829
1830 @request.addfinalizer
1831 def cleanup():
1832 fixture.destroy_repo_group(zombie_group)
1833 fixture.destroy_repo_group(child_group)
1834 fixture.destroy_repo_group(parent_group)
1835
1836 return zombie_group, parent_group, child_group
NO CONTENT: file was removed
NO CONTENT: file was removed
NO CONTENT: file was removed
NO CONTENT: file was removed
NO CONTENT: file was removed
General Comments 0