##// END OF EJS Templates
hooks: made the callback host configurable....
marcink -
r2833:920dbf8a default
parent child Browse files
Show More
@@ -1,716 +1,720 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, send an error email from
28 ## in case of Application errors, send an error email from
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be sent in case of server crash
31 ## additional error message to be sent in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 use = egg:waitress#main
54 use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 threads = 5
56 threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 max_request_body_size = 107374182400
58 max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptors limits problems.
59 ## Use poll instead of select, fixes file descriptors limits problems.
60 ## May not work on old windows systems.
60 ## May not work on old windows systems.
61 asyncore_use_poll = true
61 asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 #use = egg:gunicorn#main
69 #use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 #workers = 2
74 #workers = 2
75 ## number of threads for each worker, must be set to 1 for gevent
75 ## number of threads for each worker, must be set to 1 for gevent
76 ## generally recommended to be at 1
76 ## generally recommended to be at 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 #proc_name = rhodecode
79 #proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## for bigger setups, using a worker class other than sync is recommended
81 ## for bigger setups, using a worker class other than sync is recommended
82 #worker_class = gevent
82 #worker_class = gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that worker will handle before being gracefully
85 ## max number of requests that worker will handle before being gracefully
86 ## restarted, could prevent memory leaks
86 ## restarted, could prevent memory leaks
87 #max_requests = 1000
87 #max_requests = 1000
88 #max_requests_jitter = 30
88 #max_requests_jitter = 30
89 ## amount of time a worker can spend with handling a request before it
89 ## amount of time a worker can spend with handling a request before it
90 ## gets killed and restarted. Set to 6hrs
90 ## gets killed and restarted. Set to 6hrs
91 #timeout = 21600
91 #timeout = 21600
92
92
93
93
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
95 ## recommended when using proxy setup.
96 ## allows to set RhodeCode under a prefix in server.
96 ## allows to set RhodeCode under a prefix in server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on prefix url
100 ## to make your cookies only work on prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 # During development we want to have the debug toolbar enabled
111 # During development we want to have the debug toolbar enabled
112 pyramid.includes =
112 pyramid.includes =
113 pyramid_debugtoolbar
113 pyramid_debugtoolbar
114 rhodecode.lib.middleware.request_wrapper
114 rhodecode.lib.middleware.request_wrapper
115
115
116 pyramid.reload_templates = true
116 pyramid.reload_templates = true
117
117
118 debugtoolbar.hosts = 0.0.0.0/0
118 debugtoolbar.hosts = 0.0.0.0/0
119 debugtoolbar.exclude_prefixes =
119 debugtoolbar.exclude_prefixes =
120 /css
120 /css
121 /fonts
121 /fonts
122 /images
122 /images
123 /js
123 /js
124
124
125 ## RHODECODE PLUGINS ##
125 ## RHODECODE PLUGINS ##
126 rhodecode.includes =
126 rhodecode.includes =
127 rhodecode.api
127 rhodecode.api
128
128
129
129
130 # api prefix url
130 # api prefix url
131 rhodecode.api.url = /_admin/api
131 rhodecode.api.url = /_admin/api
132
132
133
133
134 ## END RHODECODE PLUGINS ##
134 ## END RHODECODE PLUGINS ##
135
135
136 ## encryption key used to encrypt social plugin tokens,
136 ## encryption key used to encrypt social plugin tokens,
137 ## remote_urls with credentials etc, if not set it defaults to
137 ## remote_urls with credentials etc, if not set it defaults to
138 ## `beaker.session.secret`
138 ## `beaker.session.secret`
139 #rhodecode.encrypted_values.secret =
139 #rhodecode.encrypted_values.secret =
140
140
141 ## decryption strict mode (enabled by default). It controls if decryption raises
141 ## decryption strict mode (enabled by default). It controls if decryption raises
142 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
142 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
143 #rhodecode.encrypted_values.strict = false
143 #rhodecode.encrypted_values.strict = false
144
144
145 ## return gzipped responses from Rhodecode (static files/application)
145 ## return gzipped responses from Rhodecode (static files/application)
146 gzip_responses = false
146 gzip_responses = false
147
147
148 ## autogenerate javascript routes file on startup
148 ## autogenerate javascript routes file on startup
149 generate_js_files = false
149 generate_js_files = false
150
150
151 ## Optional Languages
151 ## Optional Languages
152 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
152 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
153 lang = en
153 lang = en
154
154
155 ## perform a full repository scan on each server start, this should be
155 ## perform a full repository scan on each server start, this should be
156 ## set to false after first startup, to allow faster server restarts.
156 ## set to false after first startup, to allow faster server restarts.
157 startup.import_repos = false
157 startup.import_repos = false
158
158
159 ## Uncomment and set this path to use archive download cache.
159 ## Uncomment and set this path to use archive download cache.
160 ## Once enabled, generated archives will be cached at this location
160 ## Once enabled, generated archives will be cached at this location
161 ## and served from the cache during subsequent requests for the same archive of
161 ## and served from the cache during subsequent requests for the same archive of
162 ## the repository.
162 ## the repository.
163 #archive_cache_dir = /tmp/tarballcache
163 #archive_cache_dir = /tmp/tarballcache
164
164
165 ## URL at which the application is running. This is used for bootstrapping
165 ## URL at which the application is running. This is used for bootstrapping
166 ## requests in context when no web request is available. Used in ishell, or
166 ## requests in context when no web request is available. Used in ishell, or
167 ## SSH calls. Set this for events to receive proper url for SSH calls.
167 ## SSH calls. Set this for events to receive proper url for SSH calls.
168 app.base_url = http://rhodecode.local
168 app.base_url = http://rhodecode.local
169
169
170 ## change this to unique ID for security
170 ## change this to unique ID for security
171 app_instance_uuid = rc-production
171 app_instance_uuid = rc-production
172
172
173 ## cut off limit for large diffs (size in bytes). If overall diff size on
173 ## cut off limit for large diffs (size in bytes). If overall diff size on
174 ## commit, or pull request exceeds this limit this diff will be displayed
174 ## commit, or pull request exceeds this limit this diff will be displayed
175 ## partially. E.g 512000 == 512Kb
175 ## partially. E.g 512000 == 512Kb
176 cut_off_limit_diff = 512000
176 cut_off_limit_diff = 512000
177
177
178 ## cut off limit for large files inside diffs (size in bytes). Each individual
178 ## cut off limit for large files inside diffs (size in bytes). Each individual
179 ## file inside diff which exceeds this limit will be displayed partially.
179 ## file inside diff which exceeds this limit will be displayed partially.
180 ## E.g 128000 == 128Kb
180 ## E.g 128000 == 128Kb
181 cut_off_limit_file = 128000
181 cut_off_limit_file = 128000
182
182
183 ## use cache version of scm repo everywhere
183 ## use cache version of scm repo everywhere
184 vcs_full_cache = true
184 vcs_full_cache = true
185
185
186 ## force https in RhodeCode, fixes https redirects, assumes it's always https
186 ## force https in RhodeCode, fixes https redirects, assumes it's always https
187 ## Normally this is controlled by proper http flags sent from http server
187 ## Normally this is controlled by proper http flags sent from http server
188 force_https = false
188 force_https = false
189
189
190 ## use Strict-Transport-Security headers
190 ## use Strict-Transport-Security headers
191 use_htsts = false
191 use_htsts = false
192
192
193 ## git rev filter option, --all is the default filter, if you need to
193 ## git rev filter option, --all is the default filter, if you need to
194 ## hide all refs in changelog switch this to --branches --tags
194 ## hide all refs in changelog switch this to --branches --tags
195 git_rev_filter = --branches --tags
195 git_rev_filter = --branches --tags
196
196
197 # Set to true if your repos are exposed using the dumb protocol
197 # Set to true if your repos are exposed using the dumb protocol
198 git_update_server_info = false
198 git_update_server_info = false
199
199
200 ## RSS/ATOM feed options
200 ## RSS/ATOM feed options
201 rss_cut_off_limit = 256000
201 rss_cut_off_limit = 256000
202 rss_items_per_page = 10
202 rss_items_per_page = 10
203 rss_include_diff = false
203 rss_include_diff = false
204
204
205 ## gist URL alias, used to create nicer urls for gist. This should be an
205 ## gist URL alias, used to create nicer urls for gist. This should be an
206 ## url that does rewrites to _admin/gists/{gistid}.
206 ## url that does rewrites to _admin/gists/{gistid}.
207 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
207 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
208 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
208 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
209 gist_alias_url =
209 gist_alias_url =
210
210
211 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
211 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
212 ## used for access.
212 ## used for access.
213 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
213 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
214 ## came from the logged-in user who owns this authentication token.
214 ## came from the logged-in user who owns this authentication token.
215 ## Additionally @TOKEN syntax can be used to bind the view to a specific
215 ## Additionally @TOKEN syntax can be used to bind the view to a specific
216 ## authentication token. Such view would be only accessible when used together
216 ## authentication token. Such view would be only accessible when used together
217 ## with this authentication token
217 ## with this authentication token
218 ##
218 ##
219 ## list of all views can be found under `/_admin/permissions/auth_token_access`
219 ## list of all views can be found under `/_admin/permissions/auth_token_access`
220 ## The list should be "," separated and on a single line.
220 ## The list should be "," separated and on a single line.
221 ##
221 ##
222 ## Most common views to enable:
222 ## Most common views to enable:
223 # RepoCommitsView:repo_commit_download
223 # RepoCommitsView:repo_commit_download
224 # RepoCommitsView:repo_commit_patch
224 # RepoCommitsView:repo_commit_patch
225 # RepoCommitsView:repo_commit_raw
225 # RepoCommitsView:repo_commit_raw
226 # RepoCommitsView:repo_commit_raw@TOKEN
226 # RepoCommitsView:repo_commit_raw@TOKEN
227 # RepoFilesView:repo_files_diff
227 # RepoFilesView:repo_files_diff
228 # RepoFilesView:repo_archivefile
228 # RepoFilesView:repo_archivefile
229 # RepoFilesView:repo_file_raw
229 # RepoFilesView:repo_file_raw
230 # GistView:*
230 # GistView:*
231 api_access_controllers_whitelist =
231 api_access_controllers_whitelist =
232
232
233 ## default encoding used to convert from and to unicode
233 ## default encoding used to convert from and to unicode
234 ## can be also a comma separated list of encoding in case of mixed encodings
234 ## can be also a comma separated list of encoding in case of mixed encodings
235 default_encoding = UTF-8
235 default_encoding = UTF-8
236
236
237 ## instance-id prefix
237 ## instance-id prefix
238 ## a prefix key for this instance used for cache invalidation when running
238 ## a prefix key for this instance used for cache invalidation when running
239 ## multiple instances of rhodecode, make sure it's globally unique for
239 ## multiple instances of rhodecode, make sure it's globally unique for
240 ## all running rhodecode instances. Leave empty if you don't use it
240 ## all running rhodecode instances. Leave empty if you don't use it
241 instance_id =
241 instance_id =
242
242
243 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
243 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
244 ## of an authentication plugin also if it is disabled by it's settings.
244 ## of an authentication plugin also if it is disabled by it's settings.
245 ## This could be useful if you are unable to log in to the system due to broken
245 ## This could be useful if you are unable to log in to the system due to broken
246 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
246 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
247 ## module to log in again and fix the settings.
247 ## module to log in again and fix the settings.
248 ##
248 ##
249 ## Available builtin plugin IDs (hash is part of the ID):
249 ## Available builtin plugin IDs (hash is part of the ID):
250 ## egg:rhodecode-enterprise-ce#rhodecode
250 ## egg:rhodecode-enterprise-ce#rhodecode
251 ## egg:rhodecode-enterprise-ce#pam
251 ## egg:rhodecode-enterprise-ce#pam
252 ## egg:rhodecode-enterprise-ce#ldap
252 ## egg:rhodecode-enterprise-ce#ldap
253 ## egg:rhodecode-enterprise-ce#jasig_cas
253 ## egg:rhodecode-enterprise-ce#jasig_cas
254 ## egg:rhodecode-enterprise-ce#headers
254 ## egg:rhodecode-enterprise-ce#headers
255 ## egg:rhodecode-enterprise-ce#crowd
255 ## egg:rhodecode-enterprise-ce#crowd
256 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
256 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
257
257
258 ## alternative return HTTP header for failed authentication. Default HTTP
258 ## alternative return HTTP header for failed authentication. Default HTTP
259 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
259 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
260 ## handling that causing a series of failed authentication calls.
260 ## handling that causing a series of failed authentication calls.
261 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
261 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
262 ## This will be served instead of default 401 on bad authentication
262 ## This will be served instead of default 401 on bad authentication
263 auth_ret_code =
263 auth_ret_code =
264
264
265 ## use special detection method when serving auth_ret_code, instead of serving
265 ## use special detection method when serving auth_ret_code, instead of serving
266 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
266 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
267 ## and then serve auth_ret_code to clients
267 ## and then serve auth_ret_code to clients
268 auth_ret_code_detection = false
268 auth_ret_code_detection = false
269
269
270 ## locking return code. When repository is locked return this HTTP code. 2XX
270 ## locking return code. When repository is locked return this HTTP code. 2XX
271 ## codes don't break the transactions while 4XX codes do
271 ## codes don't break the transactions while 4XX codes do
272 lock_ret_code = 423
272 lock_ret_code = 423
273
273
274 ## allows to change the repository location in settings page
274 ## allows to change the repository location in settings page
275 allow_repo_location_change = true
275 allow_repo_location_change = true
276
276
277 ## allows to setup custom hooks in settings page
277 ## allows to setup custom hooks in settings page
278 allow_custom_hooks_settings = true
278 allow_custom_hooks_settings = true
279
279
280 ## generated license token, goto license page in RhodeCode settings to obtain
280 ## generated license token, goto license page in RhodeCode settings to obtain
281 ## new token
281 ## new token
282 license_token =
282 license_token =
283
283
284 ## supervisor connection uri, for managing supervisor and logs.
284 ## supervisor connection uri, for managing supervisor and logs.
285 supervisor.uri =
285 supervisor.uri =
286 ## supervisord group name/id we only want this RC instance to handle
286 ## supervisord group name/id we only want this RC instance to handle
287 supervisor.group_id = dev
287 supervisor.group_id = dev
288
288
289 ## Display extended labs settings
289 ## Display extended labs settings
290 labs_settings_active = true
290 labs_settings_active = true
291
291
292 ####################################
292 ####################################
293 ### CELERY CONFIG ####
293 ### CELERY CONFIG ####
294 ####################################
294 ####################################
295 ## run: /path/to/celery worker \
295 ## run: /path/to/celery worker \
296 ## -E --beat --app rhodecode.lib.celerylib.loader \
296 ## -E --beat --app rhodecode.lib.celerylib.loader \
297 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
297 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
298 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
298 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
299
299
300 use_celery = false
300 use_celery = false
301
301
302 ## connection url to the message broker (default rabbitmq)
302 ## connection url to the message broker (default rabbitmq)
303 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
303 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
304
304
305 ## maximum tasks to execute before worker restart
305 ## maximum tasks to execute before worker restart
306 celery.max_tasks_per_child = 100
306 celery.max_tasks_per_child = 100
307
307
308 ## tasks will never be sent to the queue, but executed locally instead.
308 ## tasks will never be sent to the queue, but executed locally instead.
309 celery.task_always_eager = false
309 celery.task_always_eager = false
310
310
311 ####################################
311 ####################################
312 ### BEAKER CACHE ####
312 ### BEAKER CACHE ####
313 ####################################
313 ####################################
314 # default cache dir for templates. Putting this into a ramdisk
314 # default cache dir for templates. Putting this into a ramdisk
315 ## can boost performance, eg. %(here)s/data_ramdisk
315 ## can boost performance, eg. %(here)s/data_ramdisk
316 cache_dir = %(here)s/data
316 cache_dir = %(here)s/data
317
317
318 ## locking and default file storage for Beaker. Putting this into a ramdisk
318 ## locking and default file storage for Beaker. Putting this into a ramdisk
319 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
319 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
320 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
320 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
321 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
321 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
322
322
323 beaker.cache.regions = short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
323 beaker.cache.regions = short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
324
324
325 # used for caching user permissions
325 # used for caching user permissions
326 beaker.cache.short_term.type = file
326 beaker.cache.short_term.type = file
327 beaker.cache.short_term.expire = 0
327 beaker.cache.short_term.expire = 0
328 beaker.cache.short_term.key_length = 256
328 beaker.cache.short_term.key_length = 256
329
329
330 beaker.cache.long_term.type = memory
330 beaker.cache.long_term.type = memory
331 beaker.cache.long_term.expire = 36000
331 beaker.cache.long_term.expire = 36000
332 beaker.cache.long_term.key_length = 256
332 beaker.cache.long_term.key_length = 256
333
333
334 beaker.cache.sql_cache_short.type = memory
334 beaker.cache.sql_cache_short.type = memory
335 beaker.cache.sql_cache_short.expire = 10
335 beaker.cache.sql_cache_short.expire = 10
336 beaker.cache.sql_cache_short.key_length = 256
336 beaker.cache.sql_cache_short.key_length = 256
337
337
338 ## default is memory cache, configure only if required
338 ## default is memory cache, configure only if required
339 ## using multi-node or multi-worker setup
339 ## using multi-node or multi-worker setup
340 #beaker.cache.auth_plugins.type = ext:database
340 #beaker.cache.auth_plugins.type = ext:database
341 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
341 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
342 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
342 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
343 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
343 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
344 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
344 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
345 #beaker.cache.auth_plugins.sa.pool_size = 10
345 #beaker.cache.auth_plugins.sa.pool_size = 10
346 #beaker.cache.auth_plugins.sa.max_overflow = 0
346 #beaker.cache.auth_plugins.sa.max_overflow = 0
347
347
348 beaker.cache.repo_cache_long.type = memorylru_base
348 beaker.cache.repo_cache_long.type = memorylru_base
349 beaker.cache.repo_cache_long.max_items = 4096
349 beaker.cache.repo_cache_long.max_items = 4096
350 beaker.cache.repo_cache_long.expire = 2592000
350 beaker.cache.repo_cache_long.expire = 2592000
351
351
352 ## default is memorylru_base cache, configure only if required
352 ## default is memorylru_base cache, configure only if required
353 ## using multi-node or multi-worker setup
353 ## using multi-node or multi-worker setup
354 #beaker.cache.repo_cache_long.type = ext:memcached
354 #beaker.cache.repo_cache_long.type = ext:memcached
355 #beaker.cache.repo_cache_long.url = localhost:11211
355 #beaker.cache.repo_cache_long.url = localhost:11211
356 #beaker.cache.repo_cache_long.expire = 1209600
356 #beaker.cache.repo_cache_long.expire = 1209600
357 #beaker.cache.repo_cache_long.key_length = 256
357 #beaker.cache.repo_cache_long.key_length = 256
358
358
359 ####################################
359 ####################################
360 ### BEAKER SESSION ####
360 ### BEAKER SESSION ####
361 ####################################
361 ####################################
362
362
363 ## .session.type is type of storage options for the session, current allowed
363 ## .session.type is type of storage options for the session, current allowed
364 ## types are file, ext:memcached, ext:database, and memory (default).
364 ## types are file, ext:memcached, ext:database, and memory (default).
365 beaker.session.type = file
365 beaker.session.type = file
366 beaker.session.data_dir = %(here)s/data/sessions/data
366 beaker.session.data_dir = %(here)s/data/sessions/data
367
367
368 ## db based session, fast, and allows easy management over logged in users
368 ## db based session, fast, and allows easy management over logged in users
369 #beaker.session.type = ext:database
369 #beaker.session.type = ext:database
370 #beaker.session.table_name = db_session
370 #beaker.session.table_name = db_session
371 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
371 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
372 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
372 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
373 #beaker.session.sa.pool_recycle = 3600
373 #beaker.session.sa.pool_recycle = 3600
374 #beaker.session.sa.echo = false
374 #beaker.session.sa.echo = false
375
375
376 beaker.session.key = rhodecode
376 beaker.session.key = rhodecode
377 beaker.session.secret = develop-rc-uytcxaz
377 beaker.session.secret = develop-rc-uytcxaz
378 beaker.session.lock_dir = %(here)s/data/sessions/lock
378 beaker.session.lock_dir = %(here)s/data/sessions/lock
379
379
380 ## Secure encrypted cookie. Requires AES and AES python libraries
380 ## Secure encrypted cookie. Requires AES and AES python libraries
381 ## you must disable beaker.session.secret to use this
381 ## you must disable beaker.session.secret to use this
382 #beaker.session.encrypt_key = key_for_encryption
382 #beaker.session.encrypt_key = key_for_encryption
383 #beaker.session.validate_key = validation_key
383 #beaker.session.validate_key = validation_key
384
384
385 ## sets session as invalid (also logging out the user) if it has not been
385 ## sets session as invalid (also logging out the user) if it has not been
386 ## accessed for given amount of time in seconds
386 ## accessed for given amount of time in seconds
387 beaker.session.timeout = 2592000
387 beaker.session.timeout = 2592000
388 beaker.session.httponly = true
388 beaker.session.httponly = true
389 ## Path to use for the cookie. Set to prefix if you use prefix middleware
389 ## Path to use for the cookie. Set to prefix if you use prefix middleware
390 #beaker.session.cookie_path = /custom_prefix
390 #beaker.session.cookie_path = /custom_prefix
391
391
392 ## uncomment for https secure cookie
392 ## uncomment for https secure cookie
393 beaker.session.secure = false
393 beaker.session.secure = false
394
394
395 ## auto save the session so that calling .save() is not required
395 ## auto save the session so that calling .save() is not required
396 beaker.session.auto = false
396 beaker.session.auto = false
397
397
398 ## default cookie expiration time in seconds, set to `true` to set expire
398 ## default cookie expiration time in seconds, set to `true` to set expire
399 ## at browser close
399 ## at browser close
400 #beaker.session.cookie_expires = 3600
400 #beaker.session.cookie_expires = 3600
401
401
402 ###################################
402 ###################################
403 ## SEARCH INDEXING CONFIGURATION ##
403 ## SEARCH INDEXING CONFIGURATION ##
404 ###################################
404 ###################################
405 ## Full text search indexer is available in rhodecode-tools under
405 ## Full text search indexer is available in rhodecode-tools under
406 ## `rhodecode-tools index` command
406 ## `rhodecode-tools index` command
407
407
408 ## WHOOSH Backend, doesn't require additional services to run
408 ## WHOOSH Backend, doesn't require additional services to run
409 ## it works well with a few dozen repos
409 ## it works well with a few dozen repos
410 search.module = rhodecode.lib.index.whoosh
410 search.module = rhodecode.lib.index.whoosh
411 search.location = %(here)s/data/index
411 search.location = %(here)s/data/index
412
412
413 ########################################
413 ########################################
414 ### CHANNELSTREAM CONFIG ####
414 ### CHANNELSTREAM CONFIG ####
415 ########################################
415 ########################################
416 ## channelstream enables persistent connections and live notification
416 ## channelstream enables persistent connections and live notification
417 ## in the system. It's also used by the chat system
417 ## in the system. It's also used by the chat system
418 channelstream.enabled = false
418 channelstream.enabled = false
419
419
420 ## server address for channelstream server on the backend
420 ## server address for channelstream server on the backend
421 channelstream.server = 127.0.0.1:9800
421 channelstream.server = 127.0.0.1:9800
422
422
423 ## location of the channelstream server from outside world
423 ## location of the channelstream server from outside world
424 ## use ws:// for http or wss:// for https. This address needs to be handled
424 ## use ws:// for http or wss:// for https. This address needs to be handled
425 ## by external HTTP server such as Nginx or Apache
425 ## by external HTTP server such as Nginx or Apache
426 ## see nginx/apache configuration examples in our docs
426 ## see nginx/apache configuration examples in our docs
427 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
427 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
428 channelstream.secret = secret
428 channelstream.secret = secret
429 channelstream.history.location = %(here)s/channelstream_history
429 channelstream.history.location = %(here)s/channelstream_history
430
430
431 ## Internal application path that Javascript uses to connect into.
431 ## Internal application path that Javascript uses to connect into.
432 ## If you use proxy-prefix the prefix should be added before /_channelstream
432 ## If you use proxy-prefix the prefix should be added before /_channelstream
433 channelstream.proxy_path = /_channelstream
433 channelstream.proxy_path = /_channelstream
434
434
435
435
436 ###################################
436 ###################################
437 ## APPENLIGHT CONFIG ##
437 ## APPENLIGHT CONFIG ##
438 ###################################
438 ###################################
439
439
440 ## Appenlight is tailored to work with RhodeCode, see
440 ## Appenlight is tailored to work with RhodeCode, see
441 ## http://appenlight.com for details how to obtain an account
441 ## http://appenlight.com for details how to obtain an account
442
442
443 ## appenlight integration enabled
443 ## appenlight integration enabled
444 appenlight = false
444 appenlight = false
445
445
446 appenlight.server_url = https://api.appenlight.com
446 appenlight.server_url = https://api.appenlight.com
447 appenlight.api_key = YOUR_API_KEY
447 appenlight.api_key = YOUR_API_KEY
448 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
448 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
449
449
450 # used for JS client
450 # used for JS client
451 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
451 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
452
452
453 ## TWEAK AMOUNT OF INFO SENT HERE
453 ## TWEAK AMOUNT OF INFO SENT HERE
454
454
455 ## enables 404 error logging (default False)
455 ## enables 404 error logging (default False)
456 appenlight.report_404 = false
456 appenlight.report_404 = false
457
457
458 ## time in seconds after request is considered being slow (default 1)
458 ## time in seconds after request is considered being slow (default 1)
459 appenlight.slow_request_time = 1
459 appenlight.slow_request_time = 1
460
460
461 ## record slow requests in application
461 ## record slow requests in application
462 ## (needs to be enabled for slow datastore recording and time tracking)
462 ## (needs to be enabled for slow datastore recording and time tracking)
463 appenlight.slow_requests = true
463 appenlight.slow_requests = true
464
464
465 ## enable hooking to application loggers
465 ## enable hooking to application loggers
466 appenlight.logging = true
466 appenlight.logging = true
467
467
468 ## minimum log level for log capture
468 ## minimum log level for log capture
469 appenlight.logging.level = WARNING
469 appenlight.logging.level = WARNING
470
470
471 ## send logs only from erroneous/slow requests
471 ## send logs only from erroneous/slow requests
472 ## (saves API quota for intensive logging)
472 ## (saves API quota for intensive logging)
473 appenlight.logging_on_error = false
473 appenlight.logging_on_error = false
474
474
475 ## list of additional keywords that should be grabbed from environ object
475 ## list of additional keywords that should be grabbed from environ object
476 ## can be string with comma separated list of words in lowercase
476 ## can be string with comma separated list of words in lowercase
477 ## (by default client will always send following info:
477 ## (by default client will always send following info:
478 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
478 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
479 ## start with HTTP* this list can be extended with additional keywords here
479 ## start with HTTP* this list can be extended with additional keywords here
480 appenlight.environ_keys_whitelist =
480 appenlight.environ_keys_whitelist =
481
481
482 ## list of keywords that should be blanked from request object
482 ## list of keywords that should be blanked from request object
483 ## can be string with comma separated list of words in lowercase
483 ## can be string with comma separated list of words in lowercase
484 ## (by default client will always blank keys that contain following words
484 ## (by default client will always blank keys that contain following words
485 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
485 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
486 ## this list can be extended with additional keywords set here
486 ## this list can be extended with additional keywords set here
487 appenlight.request_keys_blacklist =
487 appenlight.request_keys_blacklist =
488
488
489 ## list of namespaces that should be ignored when gathering log entries
489 ## list of namespaces that should be ignored when gathering log entries
490 ## can be string with comma separated list of namespaces
490 ## can be string with comma separated list of namespaces
491 ## (by default the client ignores own entries: appenlight_client.client)
491 ## (by default the client ignores own entries: appenlight_client.client)
492 appenlight.log_namespace_blacklist =
492 appenlight.log_namespace_blacklist =
493
493
494
494
495 ################################################################################
495 ################################################################################
496 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
496 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
497 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
497 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
498 ## execute malicious code after an exception is raised. ##
498 ## execute malicious code after an exception is raised. ##
499 ################################################################################
499 ################################################################################
500 #set debug = false
500 #set debug = false
501
501
502
502
503 ##############
503 ##############
504 ## STYLING ##
504 ## STYLING ##
505 ##############
505 ##############
506 debug_style = true
506 debug_style = true
507
507
508 ###########################################
508 ###########################################
509 ### MAIN RHODECODE DATABASE CONFIG ###
509 ### MAIN RHODECODE DATABASE CONFIG ###
510 ###########################################
510 ###########################################
511 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
511 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
512 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
512 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
513 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
513 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
514 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
514 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
515
515
516 # see sqlalchemy docs for other advanced settings
516 # see sqlalchemy docs for other advanced settings
517
517
518 ## print the sql statements to output
518 ## print the sql statements to output
519 sqlalchemy.db1.echo = false
519 sqlalchemy.db1.echo = false
520 ## recycle the connections after this amount of seconds
520 ## recycle the connections after this amount of seconds
521 sqlalchemy.db1.pool_recycle = 3600
521 sqlalchemy.db1.pool_recycle = 3600
522 sqlalchemy.db1.convert_unicode = true
522 sqlalchemy.db1.convert_unicode = true
523
523
524 ## the number of connections to keep open inside the connection pool.
524 ## the number of connections to keep open inside the connection pool.
525 ## 0 indicates no limit
525 ## 0 indicates no limit
526 #sqlalchemy.db1.pool_size = 5
526 #sqlalchemy.db1.pool_size = 5
527
527
528 ## the number of connections to allow in connection pool "overflow", that is
528 ## the number of connections to allow in connection pool "overflow", that is
529 ## connections that can be opened above and beyond the pool_size setting,
529 ## connections that can be opened above and beyond the pool_size setting,
530 ## which defaults to five.
530 ## which defaults to five.
531 #sqlalchemy.db1.max_overflow = 10
531 #sqlalchemy.db1.max_overflow = 10
532
532
533
533
534 ##################
534 ##################
535 ### VCS CONFIG ###
535 ### VCS CONFIG ###
536 ##################
536 ##################
537 vcs.server.enable = true
537 vcs.server.enable = true
538 vcs.server = localhost:9900
538 vcs.server = localhost:9900
539
539
540 ## Web server connectivity protocol, responsible for web based VCS operations
540 ## Web server connectivity protocol, responsible for web based VCS operations
541 ## Available protocols are:
541 ## Available protocols are:
542 ## `http` - use http-rpc backend (default)
542 ## `http` - use http-rpc backend (default)
543 vcs.server.protocol = http
543 vcs.server.protocol = http
544
544
545 ## Push/Pull operations protocol, available options are:
545 ## Push/Pull operations protocol, available options are:
546 ## `http` - use http-rpc backend (default)
546 ## `http` - use http-rpc backend (default)
547 ##
547 ##
548 vcs.scm_app_implementation = http
548 vcs.scm_app_implementation = http
549
549
550 ## Push/Pull operations hooks protocol, available options are:
550 ## Push/Pull operations hooks protocol, available options are:
551 ## `http` - use http-rpc backend (default)
551 ## `http` - use http-rpc backend (default)
552 vcs.hooks.protocol = http
552 vcs.hooks.protocol = http
553
553
554 ## Host on which this instance is listening for hooks. If vcsserver is in other location
555 ## this should be adjusted.
556 vcs.hooks.host = 127.0.0.1
557
554 vcs.server.log_level = debug
558 vcs.server.log_level = debug
555 ## Start VCSServer with this instance as a subprocess, useful for development
559 ## Start VCSServer with this instance as a subprocess, useful for development
556 vcs.start_server = false
560 vcs.start_server = false
557
561
558 ## List of enabled VCS backends, available options are:
562 ## List of enabled VCS backends, available options are:
559 ## `hg` - mercurial
563 ## `hg` - mercurial
560 ## `git` - git
564 ## `git` - git
561 ## `svn` - subversion
565 ## `svn` - subversion
562 vcs.backends = hg, git, svn
566 vcs.backends = hg, git, svn
563
567
564 vcs.connection_timeout = 3600
568 vcs.connection_timeout = 3600
565 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
569 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
566 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
570 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
567 #vcs.svn.compatible_version = pre-1.8-compatible
571 #vcs.svn.compatible_version = pre-1.8-compatible
568
572
569
573
570 ############################################################
574 ############################################################
571 ### Subversion proxy support (mod_dav_svn) ###
575 ### Subversion proxy support (mod_dav_svn) ###
572 ### Maps RhodeCode repo groups into SVN paths for Apache ###
576 ### Maps RhodeCode repo groups into SVN paths for Apache ###
573 ############################################################
577 ############################################################
574 ## Enable or disable the config file generation.
578 ## Enable or disable the config file generation.
575 svn.proxy.generate_config = false
579 svn.proxy.generate_config = false
576 ## Generate config file with `SVNListParentPath` set to `On`.
580 ## Generate config file with `SVNListParentPath` set to `On`.
577 svn.proxy.list_parent_path = true
581 svn.proxy.list_parent_path = true
578 ## Set location and file name of generated config file.
582 ## Set location and file name of generated config file.
579 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
583 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
580 ## alternative mod_dav config template. This needs to be a mako template
584 ## alternative mod_dav config template. This needs to be a mako template
581 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
585 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
582 ## Used as a prefix to the `Location` block in the generated config file.
586 ## Used as a prefix to the `Location` block in the generated config file.
583 ## In most cases it should be set to `/`.
587 ## In most cases it should be set to `/`.
584 svn.proxy.location_root = /
588 svn.proxy.location_root = /
585 ## Command to reload the mod dav svn configuration on change.
589 ## Command to reload the mod dav svn configuration on change.
586 ## Example: `/etc/init.d/apache2 reload`
590 ## Example: `/etc/init.d/apache2 reload`
587 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
591 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
588 ## If the timeout expires before the reload command finishes, the command will
592 ## If the timeout expires before the reload command finishes, the command will
589 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
593 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
590 #svn.proxy.reload_timeout = 10
594 #svn.proxy.reload_timeout = 10
591
595
592 ############################################################
596 ############################################################
593 ### SSH Support Settings ###
597 ### SSH Support Settings ###
594 ############################################################
598 ############################################################
595
599
596 ## Defines if a custom authorized_keys file should be created and written on
600 ## Defines if a custom authorized_keys file should be created and written on
597 ## any change of user ssh keys. Setting this to false also disables the possibility
601 ## any change of user ssh keys. Setting this to false also disables the possibility
598 ## of adding SSH keys by users from web interface. Super admins can still
602 ## of adding SSH keys by users from web interface. Super admins can still
599 ## manage SSH Keys.
603 ## manage SSH Keys.
600 ssh.generate_authorized_keyfile = false
604 ssh.generate_authorized_keyfile = false
601
605
602 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
606 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
603 # ssh.authorized_keys_ssh_opts =
607 # ssh.authorized_keys_ssh_opts =
604
608
605 ## Path to the authorized_keys file where the generated entries are placed.
609 ## Path to the authorized_keys file where the generated entries are placed.
606 ## It is possible to have multiple key files specified in `sshd_config` e.g.
610 ## It is possible to have multiple key files specified in `sshd_config` e.g.
607 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
611 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
608 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
612 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
609
613
610 ## Command to execute the SSH wrapper. The binary is available in the
614 ## Command to execute the SSH wrapper. The binary is available in the
611 ## rhodecode installation directory.
615 ## rhodecode installation directory.
612 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
616 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
613 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
617 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
614
618
615 ## Allow shell when executing the ssh-wrapper command
619 ## Allow shell when executing the ssh-wrapper command
616 ssh.wrapper_cmd_allow_shell = false
620 ssh.wrapper_cmd_allow_shell = false
617
621
618 ## Enables logging, and detailed output sent back to the client during SSH
622 ## Enables logging, and detailed output sent back to the client during SSH
619 ## operations. Useful for debugging, shouldn't be used in production.
623 ## operations. Useful for debugging, shouldn't be used in production.
620 ssh.enable_debug_logging = true
624 ssh.enable_debug_logging = true
621
625
622 ## Paths to binary executable, by default they are the names, but we can
626 ## Paths to binary executable, by default they are the names, but we can
623 ## override them if we want to use a custom one
627 ## override them if we want to use a custom one
624 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
628 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
625 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
629 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
626 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
630 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
627
631
628
632
629 ## Dummy marker to add new entries after.
633 ## Dummy marker to add new entries after.
630 ## Add any custom entries below. Please don't remove.
634 ## Add any custom entries below. Please don't remove.
631 custom.conf = 1
635 custom.conf = 1
632
636
633
637
634 ################################
638 ################################
635 ### LOGGING CONFIGURATION ####
639 ### LOGGING CONFIGURATION ####
636 ################################
640 ################################
637 [loggers]
641 [loggers]
638 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
642 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
639
643
640 [handlers]
644 [handlers]
641 keys = console, console_sql
645 keys = console, console_sql
642
646
643 [formatters]
647 [formatters]
644 keys = generic, color_formatter, color_formatter_sql
648 keys = generic, color_formatter, color_formatter_sql
645
649
646 #############
650 #############
647 ## LOGGERS ##
651 ## LOGGERS ##
648 #############
652 #############
649 [logger_root]
653 [logger_root]
650 level = NOTSET
654 level = NOTSET
651 handlers = console
655 handlers = console
652
656
653 [logger_sqlalchemy]
657 [logger_sqlalchemy]
654 level = INFO
658 level = INFO
655 handlers = console_sql
659 handlers = console_sql
656 qualname = sqlalchemy.engine
660 qualname = sqlalchemy.engine
657 propagate = 0
661 propagate = 0
658
662
659 [logger_beaker]
663 [logger_beaker]
660 level = DEBUG
664 level = DEBUG
661 handlers =
665 handlers =
662 qualname = beaker.container
666 qualname = beaker.container
663 propagate = 1
667 propagate = 1
664
668
665 [logger_rhodecode]
669 [logger_rhodecode]
666 level = DEBUG
670 level = DEBUG
667 handlers =
671 handlers =
668 qualname = rhodecode
672 qualname = rhodecode
669 propagate = 1
673 propagate = 1
670
674
671 [logger_ssh_wrapper]
675 [logger_ssh_wrapper]
672 level = DEBUG
676 level = DEBUG
673 handlers =
677 handlers =
674 qualname = ssh_wrapper
678 qualname = ssh_wrapper
675 propagate = 1
679 propagate = 1
676
680
677 [logger_celery]
681 [logger_celery]
678 level = DEBUG
682 level = DEBUG
679 handlers =
683 handlers =
680 qualname = celery
684 qualname = celery
681
685
682
686
683 ##############
687 ##############
684 ## HANDLERS ##
688 ## HANDLERS ##
685 ##############
689 ##############
686
690
687 [handler_console]
691 [handler_console]
688 class = StreamHandler
692 class = StreamHandler
689 args = (sys.stderr, )
693 args = (sys.stderr, )
690 level = DEBUG
694 level = DEBUG
691 formatter = color_formatter
695 formatter = color_formatter
692
696
693 [handler_console_sql]
697 [handler_console_sql]
694 class = StreamHandler
698 class = StreamHandler
695 args = (sys.stderr, )
699 args = (sys.stderr, )
696 level = DEBUG
700 level = DEBUG
697 formatter = color_formatter_sql
701 formatter = color_formatter_sql
698
702
699 ################
703 ################
700 ## FORMATTERS ##
704 ## FORMATTERS ##
701 ################
705 ################
702
706
703 [formatter_generic]
707 [formatter_generic]
704 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
708 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
705 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
709 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
706 datefmt = %Y-%m-%d %H:%M:%S
710 datefmt = %Y-%m-%d %H:%M:%S
707
711
708 [formatter_color_formatter]
712 [formatter_color_formatter]
709 class = rhodecode.lib.logging_formatter.ColorFormatter
713 class = rhodecode.lib.logging_formatter.ColorFormatter
710 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
714 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
711 datefmt = %Y-%m-%d %H:%M:%S
715 datefmt = %Y-%m-%d %H:%M:%S
712
716
713 [formatter_color_formatter_sql]
717 [formatter_color_formatter_sql]
714 class = rhodecode.lib.logging_formatter.ColorFormatterSql
718 class = rhodecode.lib.logging_formatter.ColorFormatterSql
715 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
719 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
716 datefmt = %Y-%m-%d %H:%M:%S
720 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,686 +1,689 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, send an error email from
28 ## in case of Application errors, send an error email from
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be sent in case of a server crash
31 ## additional error message to be sent in case of a server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 #use = egg:waitress#main
54 #use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 #threads = 5
56 #threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 #max_request_body_size = 107374182400
58 #max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptors limits problems.
59 ## Use poll instead of select, fixes file descriptors limits problems.
60 ## May not work on old windows systems.
60 ## May not work on old windows systems.
61 #asyncore_use_poll = true
61 #asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 use = egg:gunicorn#main
69 use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 workers = 2
74 workers = 2
75 ## number of threads for each of the worker, must be set to 1 for gevent
75 ## number of threads for each of the worker, must be set to 1 for gevent
76 ## generally recommended to be at 1
76 ## generally recommended to be at 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 proc_name = rhodecode
79 proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## for bigger setups it is recommended to use a worker class other than sync
81 ## for bigger setups it is recommended to use a worker class other than sync
82 worker_class = gevent
82 worker_class = gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that worker will handle before being gracefully
85 ## max number of requests that worker will handle before being gracefully
86 ## restarted, could prevent memory leaks
86 ## restarted, could prevent memory leaks
87 max_requests = 1000
87 max_requests = 1000
88 max_requests_jitter = 30
88 max_requests_jitter = 30
89 ## amount of time a worker can spend with handling a request before it
89 ## amount of time a worker can spend with handling a request before it
90 ## gets killed and restarted. Set to 6hrs
90 ## gets killed and restarted. Set to 6hrs
91 timeout = 21600
91 timeout = 21600
92
92
93
93
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
95 ## recommended when using proxy setup.
96 ## allows to set RhodeCode under a prefix in server.
96 ## allows to set RhodeCode under a prefix in server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on prefix url
100 ## to make your cookies only work on prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 ## encryption key used to encrypt social plugin tokens,
111 ## encryption key used to encrypt social plugin tokens,
112 ## remote_urls with credentials etc, if not set it defaults to
112 ## remote_urls with credentials etc, if not set it defaults to
113 ## `beaker.session.secret`
113 ## `beaker.session.secret`
114 #rhodecode.encrypted_values.secret =
114 #rhodecode.encrypted_values.secret =
115
115
116 ## decryption strict mode (enabled by default). It controls if decryption raises
116 ## decryption strict mode (enabled by default). It controls if decryption raises
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
118 #rhodecode.encrypted_values.strict = false
118 #rhodecode.encrypted_values.strict = false
119
119
120 ## return gzipped responses from Rhodecode (static files/application)
120 ## return gzipped responses from Rhodecode (static files/application)
121 gzip_responses = false
121 gzip_responses = false
122
122
123 ## autogenerate javascript routes file on startup
123 ## autogenerate javascript routes file on startup
124 generate_js_files = false
124 generate_js_files = false
125
125
126 ## Optional Languages
126 ## Optional Languages
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 lang = en
128 lang = en
129
129
130 ## perform a full repository scan on each server start, this should be
130 ## perform a full repository scan on each server start, this should be
131 ## set to false after first startup, to allow faster server restarts.
131 ## set to false after first startup, to allow faster server restarts.
132 startup.import_repos = false
132 startup.import_repos = false
133
133
134 ## Uncomment and set this path to use archive download cache.
134 ## Uncomment and set this path to use archive download cache.
135 ## Once enabled, generated archives will be cached at this location
135 ## Once enabled, generated archives will be cached at this location
136 ## and served from the cache during subsequent requests for the same archive of
136 ## and served from the cache during subsequent requests for the same archive of
137 ## the repository.
137 ## the repository.
138 #archive_cache_dir = /tmp/tarballcache
138 #archive_cache_dir = /tmp/tarballcache
139
139
140 ## URL at which the application is running. This is used for bootstrapping
140 ## URL at which the application is running. This is used for bootstrapping
141 ## requests in context when no web request is available. Used in ishell, or
141 ## requests in context when no web request is available. Used in ishell, or
142 ## SSH calls. Set this for events to receive proper url for SSH calls.
142 ## SSH calls. Set this for events to receive proper url for SSH calls.
143 app.base_url = http://rhodecode.local
143 app.base_url = http://rhodecode.local
144
144
145 ## change this to unique ID for security
145 ## change this to unique ID for security
146 app_instance_uuid = rc-production
146 app_instance_uuid = rc-production
147
147
148 ## cut off limit for large diffs (size in bytes). If overall diff size on
148 ## cut off limit for large diffs (size in bytes). If overall diff size on
149 ## commit, or pull request exceeds this limit this diff will be displayed
149 ## commit, or pull request exceeds this limit this diff will be displayed
150 ## partially. E.g 512000 == 512Kb
150 ## partially. E.g 512000 == 512Kb
151 cut_off_limit_diff = 512000
151 cut_off_limit_diff = 512000
152
152
153 ## cut off limit for large files inside diffs (size in bytes). Each individual
153 ## cut off limit for large files inside diffs (size in bytes). Each individual
154 ## file inside diff which exceeds this limit will be displayed partially.
154 ## file inside diff which exceeds this limit will be displayed partially.
155 ## E.g 128000 == 128Kb
155 ## E.g 128000 == 128Kb
156 cut_off_limit_file = 128000
156 cut_off_limit_file = 128000
157
157
158 ## use cache version of scm repo everywhere
158 ## use cache version of scm repo everywhere
159 vcs_full_cache = true
159 vcs_full_cache = true
160
160
161 ## force https in RhodeCode, fixes https redirects, assumes it's always https
161 ## force https in RhodeCode, fixes https redirects, assumes it's always https
162 ## Normally this is controlled by proper http flags sent from http server
162 ## Normally this is controlled by proper http flags sent from http server
163 force_https = false
163 force_https = false
164
164
165 ## use Strict-Transport-Security headers
165 ## use Strict-Transport-Security headers
166 use_htsts = false
166 use_htsts = false
167
167
168 ## git rev filter option, --all is the default filter, if you need to
168 ## git rev filter option, --all is the default filter, if you need to
169 ## hide all refs in changelog switch this to --branches --tags
169 ## hide all refs in changelog switch this to --branches --tags
170 git_rev_filter = --branches --tags
170 git_rev_filter = --branches --tags
171
171
172 # Set to true if your repos are exposed using the dumb protocol
172 # Set to true if your repos are exposed using the dumb protocol
173 git_update_server_info = false
173 git_update_server_info = false
174
174
175 ## RSS/ATOM feed options
175 ## RSS/ATOM feed options
176 rss_cut_off_limit = 256000
176 rss_cut_off_limit = 256000
177 rss_items_per_page = 10
177 rss_items_per_page = 10
178 rss_include_diff = false
178 rss_include_diff = false
179
179
180 ## gist URL alias, used to create nicer urls for gist. This should be an
180 ## gist URL alias, used to create nicer urls for gist. This should be an
181 ## url that does rewrites to _admin/gists/{gistid}.
181 ## url that does rewrites to _admin/gists/{gistid}.
182 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
182 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
183 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
183 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
184 gist_alias_url =
184 gist_alias_url =
185
185
186 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
186 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
187 ## used for access.
187 ## used for access.
188 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
188 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
189 ## came from the logged in user who owns this authentication token.
189 ## came from the logged in user who owns this authentication token.
190 ## Additionally @TOKEN syntax can be used to bind the view to a specific
190 ## Additionally @TOKEN syntax can be used to bind the view to a specific
191 ## authentication token. Such view would be only accessible when used together
191 ## authentication token. Such view would be only accessible when used together
192 ## with this authentication token
192 ## with this authentication token
193 ##
193 ##
194 ## list of all views can be found under `/_admin/permissions/auth_token_access`
194 ## list of all views can be found under `/_admin/permissions/auth_token_access`
195 ## The list should be "," separated and on a single line.
195 ## The list should be "," separated and on a single line.
196 ##
196 ##
197 ## Most common views to enable:
197 ## Most common views to enable:
198 # RepoCommitsView:repo_commit_download
198 # RepoCommitsView:repo_commit_download
199 # RepoCommitsView:repo_commit_patch
199 # RepoCommitsView:repo_commit_patch
200 # RepoCommitsView:repo_commit_raw
200 # RepoCommitsView:repo_commit_raw
201 # RepoCommitsView:repo_commit_raw@TOKEN
201 # RepoCommitsView:repo_commit_raw@TOKEN
202 # RepoFilesView:repo_files_diff
202 # RepoFilesView:repo_files_diff
203 # RepoFilesView:repo_archivefile
203 # RepoFilesView:repo_archivefile
204 # RepoFilesView:repo_file_raw
204 # RepoFilesView:repo_file_raw
205 # GistView:*
205 # GistView:*
206 api_access_controllers_whitelist =
206 api_access_controllers_whitelist =
207
207
208 ## default encoding used to convert from and to unicode
208 ## default encoding used to convert from and to unicode
209 ## can be also a comma separated list of encoding in case of mixed encodings
209 ## can be also a comma separated list of encoding in case of mixed encodings
210 default_encoding = UTF-8
210 default_encoding = UTF-8
211
211
212 ## instance-id prefix
212 ## instance-id prefix
213 ## a prefix key for this instance used for cache invalidation when running
213 ## a prefix key for this instance used for cache invalidation when running
214 ## multiple instances of rhodecode, make sure it's globally unique for
214 ## multiple instances of rhodecode, make sure it's globally unique for
215 ## all running rhodecode instances. Leave empty if you don't use it
215 ## all running rhodecode instances. Leave empty if you don't use it
216 instance_id =
216 instance_id =
217
217
218 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
218 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
219 ## of an authentication plugin even if it is disabled by its settings.
219 ## of an authentication plugin even if it is disabled by its settings.
220 ## This could be useful if you are unable to log in to the system due to broken
220 ## This could be useful if you are unable to log in to the system due to broken
221 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
221 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
222 ## module to log in again and fix the settings.
222 ## module to log in again and fix the settings.
223 ##
223 ##
224 ## Available builtin plugin IDs (hash is part of the ID):
224 ## Available builtin plugin IDs (hash is part of the ID):
225 ## egg:rhodecode-enterprise-ce#rhodecode
225 ## egg:rhodecode-enterprise-ce#rhodecode
226 ## egg:rhodecode-enterprise-ce#pam
226 ## egg:rhodecode-enterprise-ce#pam
227 ## egg:rhodecode-enterprise-ce#ldap
227 ## egg:rhodecode-enterprise-ce#ldap
228 ## egg:rhodecode-enterprise-ce#jasig_cas
228 ## egg:rhodecode-enterprise-ce#jasig_cas
229 ## egg:rhodecode-enterprise-ce#headers
229 ## egg:rhodecode-enterprise-ce#headers
230 ## egg:rhodecode-enterprise-ce#crowd
230 ## egg:rhodecode-enterprise-ce#crowd
231 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
231 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
232
232
233 ## alternative return HTTP header for failed authentication. Default HTTP
233 ## alternative return HTTP header for failed authentication. Default HTTP
234 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
234 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
235 ## handling that causing a series of failed authentication calls.
235 ## handling that causing a series of failed authentication calls.
236 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
236 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
237 ## This will be served instead of default 401 on bad authentication
237 ## This will be served instead of default 401 on bad authentication
238 auth_ret_code =
238 auth_ret_code =
239
239
240 ## use special detection method when serving auth_ret_code, instead of serving
240 ## use special detection method when serving auth_ret_code, instead of serving
241 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
241 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
242 ## and then serve auth_ret_code to clients
242 ## and then serve auth_ret_code to clients
243 auth_ret_code_detection = false
243 auth_ret_code_detection = false
244
244
245 ## locking return code. When repository is locked return this HTTP code. 2XX
245 ## locking return code. When repository is locked return this HTTP code. 2XX
246 ## codes don't break the transactions while 4XX codes do
246 ## codes don't break the transactions while 4XX codes do
247 lock_ret_code = 423
247 lock_ret_code = 423
248
248
249 ## allows to change the repository location in settings page
249 ## allows to change the repository location in settings page
250 allow_repo_location_change = true
250 allow_repo_location_change = true
251
251
252 ## allows to setup custom hooks in settings page
252 ## allows to setup custom hooks in settings page
253 allow_custom_hooks_settings = true
253 allow_custom_hooks_settings = true
254
254
255 ## generated license token, goto license page in RhodeCode settings to obtain
255 ## generated license token, goto license page in RhodeCode settings to obtain
256 ## new token
256 ## new token
257 license_token =
257 license_token =
258
258
259 ## supervisor connection uri, for managing supervisor and logs.
259 ## supervisor connection uri, for managing supervisor and logs.
260 supervisor.uri =
260 supervisor.uri =
261 ## supervisord group name/id we only want this RC instance to handle
261 ## supervisord group name/id we only want this RC instance to handle
262 supervisor.group_id = prod
262 supervisor.group_id = prod
263
263
264 ## Display extended labs settings
264 ## Display extended labs settings
265 labs_settings_active = true
265 labs_settings_active = true
266
266
267 ####################################
267 ####################################
268 ### CELERY CONFIG ####
268 ### CELERY CONFIG ####
269 ####################################
269 ####################################
270 ## run: /path/to/celery worker \
270 ## run: /path/to/celery worker \
271 ## -E --beat --app rhodecode.lib.celerylib.loader \
271 ## -E --beat --app rhodecode.lib.celerylib.loader \
272 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
272 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
273 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
273 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
274
274
275 use_celery = false
275 use_celery = false
276
276
277 ## connection url to the message broker (default rabbitmq)
277 ## connection url to the message broker (default rabbitmq)
278 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
278 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
279
279
280 ## maximum tasks to execute before worker restart
280 ## maximum tasks to execute before worker restart
281 celery.max_tasks_per_child = 100
281 celery.max_tasks_per_child = 100
282
282
283 ## tasks will never be sent to the queue, but executed locally instead.
283 ## tasks will never be sent to the queue, but executed locally instead.
284 celery.task_always_eager = false
284 celery.task_always_eager = false
285
285
286 ####################################
286 ####################################
287 ### BEAKER CACHE ####
287 ### BEAKER CACHE ####
288 ####################################
288 ####################################
289 # default cache dir for templates. Putting this into a ramdisk
289 # default cache dir for templates. Putting this into a ramdisk
290 ## can boost performance, eg. %(here)s/data_ramdisk
290 ## can boost performance, eg. %(here)s/data_ramdisk
291 cache_dir = %(here)s/data
291 cache_dir = %(here)s/data
292
292
293 ## locking and default file storage for Beaker. Putting this into a ramdisk
293 ## locking and default file storage for Beaker. Putting this into a ramdisk
294 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
294 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
295 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
295 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
296 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
296 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
297
297
298 beaker.cache.regions = short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
298 beaker.cache.regions = short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
299
299
300 # used for caching user permissions
300 # used for caching user permissions
301 beaker.cache.short_term.type = file
301 beaker.cache.short_term.type = file
302 beaker.cache.short_term.expire = 0
302 beaker.cache.short_term.expire = 0
303 beaker.cache.short_term.key_length = 256
303 beaker.cache.short_term.key_length = 256
304
304
305 beaker.cache.long_term.type = memory
305 beaker.cache.long_term.type = memory
306 beaker.cache.long_term.expire = 36000
306 beaker.cache.long_term.expire = 36000
307 beaker.cache.long_term.key_length = 256
307 beaker.cache.long_term.key_length = 256
308
308
309 beaker.cache.sql_cache_short.type = memory
309 beaker.cache.sql_cache_short.type = memory
310 beaker.cache.sql_cache_short.expire = 10
310 beaker.cache.sql_cache_short.expire = 10
311 beaker.cache.sql_cache_short.key_length = 256
311 beaker.cache.sql_cache_short.key_length = 256
312
312
313 ## default is memory cache, configure only if required
313 ## default is memory cache, configure only if required
314 ## using multi-node or multi-worker setup
314 ## using multi-node or multi-worker setup
315 #beaker.cache.auth_plugins.type = ext:database
315 #beaker.cache.auth_plugins.type = ext:database
316 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
316 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
317 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
317 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
318 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
318 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
319 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
319 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
320 #beaker.cache.auth_plugins.sa.pool_size = 10
320 #beaker.cache.auth_plugins.sa.pool_size = 10
321 #beaker.cache.auth_plugins.sa.max_overflow = 0
321 #beaker.cache.auth_plugins.sa.max_overflow = 0
322
322
323 beaker.cache.repo_cache_long.type = memorylru_base
323 beaker.cache.repo_cache_long.type = memorylru_base
324 beaker.cache.repo_cache_long.max_items = 4096
324 beaker.cache.repo_cache_long.max_items = 4096
325 beaker.cache.repo_cache_long.expire = 2592000
325 beaker.cache.repo_cache_long.expire = 2592000
326
326
327 ## default is memorylru_base cache, configure only if required
327 ## default is memorylru_base cache, configure only if required
328 ## using multi-node or multi-worker setup
328 ## using multi-node or multi-worker setup
329 #beaker.cache.repo_cache_long.type = ext:memcached
329 #beaker.cache.repo_cache_long.type = ext:memcached
330 #beaker.cache.repo_cache_long.url = localhost:11211
330 #beaker.cache.repo_cache_long.url = localhost:11211
331 #beaker.cache.repo_cache_long.expire = 1209600
331 #beaker.cache.repo_cache_long.expire = 1209600
332 #beaker.cache.repo_cache_long.key_length = 256
332 #beaker.cache.repo_cache_long.key_length = 256
333
333
334 ####################################
334 ####################################
335 ### BEAKER SESSION ####
335 ### BEAKER SESSION ####
336 ####################################
336 ####################################
337
337
338 ## .session.type is type of storage options for the session, current allowed
338 ## .session.type is type of storage options for the session, current allowed
339 ## types are file, ext:memcached, ext:database, and memory (default).
339 ## types are file, ext:memcached, ext:database, and memory (default).
340 beaker.session.type = file
340 beaker.session.type = file
341 beaker.session.data_dir = %(here)s/data/sessions/data
341 beaker.session.data_dir = %(here)s/data/sessions/data
342
342
343 ## db based session, fast, and allows easy management over logged in users
343 ## db based session, fast, and allows easy management over logged in users
344 #beaker.session.type = ext:database
344 #beaker.session.type = ext:database
345 #beaker.session.table_name = db_session
345 #beaker.session.table_name = db_session
346 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
346 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
347 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
347 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
348 #beaker.session.sa.pool_recycle = 3600
348 #beaker.session.sa.pool_recycle = 3600
349 #beaker.session.sa.echo = false
349 #beaker.session.sa.echo = false
350
350
351 beaker.session.key = rhodecode
351 beaker.session.key = rhodecode
352 beaker.session.secret = production-rc-uytcxaz
352 beaker.session.secret = production-rc-uytcxaz
353 beaker.session.lock_dir = %(here)s/data/sessions/lock
353 beaker.session.lock_dir = %(here)s/data/sessions/lock
354
354
355 ## Secure encrypted cookie. Requires AES and AES python libraries
355 ## Secure encrypted cookie. Requires AES and AES python libraries
356 ## you must disable beaker.session.secret to use this
356 ## you must disable beaker.session.secret to use this
357 #beaker.session.encrypt_key = key_for_encryption
357 #beaker.session.encrypt_key = key_for_encryption
358 #beaker.session.validate_key = validation_key
358 #beaker.session.validate_key = validation_key
359
359
360 ## sets session as invalid (also logging out user) if it has not been
360 ## sets session as invalid (also logging out user) if it has not been
361 ## accessed for given amount of time in seconds
361 ## accessed for given amount of time in seconds
362 beaker.session.timeout = 2592000
362 beaker.session.timeout = 2592000
363 beaker.session.httponly = true
363 beaker.session.httponly = true
364 ## Path to use for the cookie. Set to prefix if you use prefix middleware
364 ## Path to use for the cookie. Set to prefix if you use prefix middleware
365 #beaker.session.cookie_path = /custom_prefix
365 #beaker.session.cookie_path = /custom_prefix
366
366
367 ## uncomment for https secure cookie
367 ## uncomment for https secure cookie
368 beaker.session.secure = false
368 beaker.session.secure = false
369
369
370 ## auto save the session so that calling .save() is not required
370 ## auto save the session so that calling .save() is not required
371 beaker.session.auto = false
371 beaker.session.auto = false
372
372
373 ## default cookie expiration time in seconds, set to `true` to set expire
373 ## default cookie expiration time in seconds, set to `true` to set expire
374 ## at browser close
374 ## at browser close
375 #beaker.session.cookie_expires = 3600
375 #beaker.session.cookie_expires = 3600
376
376
377 ###################################
377 ###################################
378 ## SEARCH INDEXING CONFIGURATION ##
378 ## SEARCH INDEXING CONFIGURATION ##
379 ###################################
379 ###################################
380 ## Full text search indexer is available in rhodecode-tools under
380 ## Full text search indexer is available in rhodecode-tools under
381 ## `rhodecode-tools index` command
381 ## `rhodecode-tools index` command
382
382
383 ## WHOOSH Backend, doesn't require additional services to run
383 ## WHOOSH Backend, doesn't require additional services to run
384 ## it works well with a few dozen repos
384 ## it works well with a few dozen repos
385 search.module = rhodecode.lib.index.whoosh
385 search.module = rhodecode.lib.index.whoosh
386 search.location = %(here)s/data/index
386 search.location = %(here)s/data/index
387
387
388 ########################################
388 ########################################
389 ### CHANNELSTREAM CONFIG ####
389 ### CHANNELSTREAM CONFIG ####
390 ########################################
390 ########################################
391 ## channelstream enables persistent connections and live notification
391 ## channelstream enables persistent connections and live notification
392 ## in the system. It's also used by the chat system
392 ## in the system. It's also used by the chat system
393 channelstream.enabled = false
393 channelstream.enabled = false
394
394
395 ## server address for channelstream server on the backend
395 ## server address for channelstream server on the backend
396 channelstream.server = 127.0.0.1:9800
396 channelstream.server = 127.0.0.1:9800
397
397
398 ## location of the channelstream server from outside world
398 ## location of the channelstream server from outside world
399 ## use ws:// for http or wss:// for https. This address needs to be handled
399 ## use ws:// for http or wss:// for https. This address needs to be handled
400 ## by external HTTP server such as Nginx or Apache
400 ## by external HTTP server such as Nginx or Apache
401 ## see nginx/apache configuration examples in our docs
401 ## see nginx/apache configuration examples in our docs
402 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
402 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
403 channelstream.secret = secret
403 channelstream.secret = secret
404 channelstream.history.location = %(here)s/channelstream_history
404 channelstream.history.location = %(here)s/channelstream_history
405
405
406 ## Internal application path that Javascript uses to connect into.
406 ## Internal application path that Javascript uses to connect into.
407 ## If you use proxy-prefix the prefix should be added before /_channelstream
407 ## If you use proxy-prefix the prefix should be added before /_channelstream
408 channelstream.proxy_path = /_channelstream
408 channelstream.proxy_path = /_channelstream
409
409
410
410
411 ###################################
411 ###################################
412 ## APPENLIGHT CONFIG ##
412 ## APPENLIGHT CONFIG ##
413 ###################################
413 ###################################
414
414
415 ## Appenlight is tailored to work with RhodeCode, see
415 ## Appenlight is tailored to work with RhodeCode, see
416 ## http://appenlight.com for details how to obtain an account
416 ## http://appenlight.com for details how to obtain an account
417
417
418 ## appenlight integration enabled
418 ## appenlight integration enabled
419 appenlight = false
419 appenlight = false
420
420
421 appenlight.server_url = https://api.appenlight.com
421 appenlight.server_url = https://api.appenlight.com
422 appenlight.api_key = YOUR_API_KEY
422 appenlight.api_key = YOUR_API_KEY
423 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
423 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
424
424
425 # used for JS client
425 # used for JS client
426 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
426 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
427
427
428 ## TWEAK AMOUNT OF INFO SENT HERE
428 ## TWEAK AMOUNT OF INFO SENT HERE
429
429
430 ## enables 404 error logging (default False)
430 ## enables 404 error logging (default False)
431 appenlight.report_404 = false
431 appenlight.report_404 = false
432
432
433 ## time in seconds after request is considered being slow (default 1)
433 ## time in seconds after request is considered being slow (default 1)
434 appenlight.slow_request_time = 1
434 appenlight.slow_request_time = 1
435
435
436 ## record slow requests in application
436 ## record slow requests in application
437 ## (needs to be enabled for slow datastore recording and time tracking)
437 ## (needs to be enabled for slow datastore recording and time tracking)
438 appenlight.slow_requests = true
438 appenlight.slow_requests = true
439
439
440 ## enable hooking to application loggers
440 ## enable hooking to application loggers
441 appenlight.logging = true
441 appenlight.logging = true
442
442
443 ## minimum log level for log capture
443 ## minimum log level for log capture
444 appenlight.logging.level = WARNING
444 appenlight.logging.level = WARNING
445
445
446 ## send logs only from erroneous/slow requests
446 ## send logs only from erroneous/slow requests
447 ## (saves API quota for intensive logging)
447 ## (saves API quota for intensive logging)
448 appenlight.logging_on_error = false
448 appenlight.logging_on_error = false
449
449
450 ## list of additional keywords that should be grabbed from environ object
450 ## list of additional keywords that should be grabbed from environ object
451 ## can be string with comma separated list of words in lowercase
451 ## can be string with comma separated list of words in lowercase
452 ## (by default client will always send following info:
452 ## (by default client will always send following info:
453 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
453 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
454 ## start with HTTP*); this list can be extended with additional keywords here
454 ## start with HTTP*); this list can be extended with additional keywords here
455 appenlight.environ_keys_whitelist =
455 appenlight.environ_keys_whitelist =
456
456
457 ## list of keywords that should be blanked from request object
457 ## list of keywords that should be blanked from request object
458 ## can be string with comma separated list of words in lowercase
458 ## can be string with comma separated list of words in lowercase
459 ## (by default client will always blank keys that contain following words
459 ## (by default client will always blank keys that contain following words
460 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
460 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
461 ## this list can be extended with additional keywords set here
461 ## this list can be extended with additional keywords set here
462 appenlight.request_keys_blacklist =
462 appenlight.request_keys_blacklist =
463
463
464 ## list of namespaces that should be ignored when gathering log entries
464 ## list of namespaces that should be ignored when gathering log entries
465 ## can be string with comma separated list of namespaces
465 ## can be string with comma separated list of namespaces
466 ## (by default the client ignores own entries: appenlight_client.client)
466 ## (by default the client ignores own entries: appenlight_client.client)
467 appenlight.log_namespace_blacklist =
467 appenlight.log_namespace_blacklist =
468
468
469
469
470 ################################################################################
470 ################################################################################
471 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
471 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
472 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
472 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
473 ## execute malicious code after an exception is raised. ##
473 ## execute malicious code after an exception is raised. ##
474 ################################################################################
474 ################################################################################
475 set debug = false
475 set debug = false
476
476
477
477
478 ###########################################
478 ###########################################
479 ### MAIN RHODECODE DATABASE CONFIG ###
479 ### MAIN RHODECODE DATABASE CONFIG ###
480 ###########################################
480 ###########################################
481 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
481 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
482 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
482 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
483 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
483 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
484 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
484 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
485
485
486 # see sqlalchemy docs for other advanced settings
486 # see sqlalchemy docs for other advanced settings
487
487
488 ## print the sql statements to output
488 ## print the sql statements to output
489 sqlalchemy.db1.echo = false
489 sqlalchemy.db1.echo = false
490 ## recycle the connections after this amount of seconds
490 ## recycle the connections after this amount of seconds
491 sqlalchemy.db1.pool_recycle = 3600
491 sqlalchemy.db1.pool_recycle = 3600
492 sqlalchemy.db1.convert_unicode = true
492 sqlalchemy.db1.convert_unicode = true
493
493
494 ## the number of connections to keep open inside the connection pool.
494 ## the number of connections to keep open inside the connection pool.
495 ## 0 indicates no limit
495 ## 0 indicates no limit
496 #sqlalchemy.db1.pool_size = 5
496 #sqlalchemy.db1.pool_size = 5
497
497
498 ## the number of connections to allow in connection pool "overflow", that is
498 ## the number of connections to allow in connection pool "overflow", that is
499 ## connections that can be opened above and beyond the pool_size setting,
499 ## connections that can be opened above and beyond the pool_size setting,
500 ## which defaults to five.
500 ## which defaults to five.
501 #sqlalchemy.db1.max_overflow = 10
501 #sqlalchemy.db1.max_overflow = 10
502
502
503
503
504 ##################
504 ##################
505 ### VCS CONFIG ###
505 ### VCS CONFIG ###
506 ##################
506 ##################
507 vcs.server.enable = true
507 vcs.server.enable = true
508 vcs.server = localhost:9900
508 vcs.server = localhost:9900
509
509
510 ## Web server connectivity protocol, responsible for web based VCS operatations
510 ## Web server connectivity protocol, responsible for web based VCS operatations
511 ## Available protocols are:
511 ## Available protocols are:
512 ## `http` - use http-rpc backend (default)
512 ## `http` - use http-rpc backend (default)
513 vcs.server.protocol = http
513 vcs.server.protocol = http
514
514
515 ## Push/Pull operations protocol, available options are:
515 ## Push/Pull operations protocol, available options are:
516 ## `http` - use http-rpc backend (default)
516 ## `http` - use http-rpc backend (default)
517 ##
517 ##
518 vcs.scm_app_implementation = http
518 vcs.scm_app_implementation = http
519
519
520 ## Push/Pull operations hooks protocol, available options are:
520 ## Push/Pull operations hooks protocol, available options are:
521 ## `http` - use http-rpc backend (default)
521 ## `http` - use http-rpc backend (default)
522 vcs.hooks.protocol = http
522 vcs.hooks.protocol = http
523 ## Host on which this instance is listening for hooks. If vcsserver is in other location
524 ## this should be adjusted.
525 vcs.hooks.host = 127.0.0.1
523
526
524 vcs.server.log_level = info
527 vcs.server.log_level = info
525 ## Start VCSServer with this instance as a subprocess, usefull for development
528 ## Start VCSServer with this instance as a subprocess, usefull for development
526 vcs.start_server = false
529 vcs.start_server = false
527
530
528 ## List of enabled VCS backends, available options are:
531 ## List of enabled VCS backends, available options are:
529 ## `hg` - mercurial
532 ## `hg` - mercurial
530 ## `git` - git
533 ## `git` - git
531 ## `svn` - subversion
534 ## `svn` - subversion
532 vcs.backends = hg, git, svn
535 vcs.backends = hg, git, svn
533
536
534 vcs.connection_timeout = 3600
537 vcs.connection_timeout = 3600
535 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
538 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
536 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
539 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
537 #vcs.svn.compatible_version = pre-1.8-compatible
540 #vcs.svn.compatible_version = pre-1.8-compatible
538
541
539
542
540 ############################################################
543 ############################################################
541 ### Subversion proxy support (mod_dav_svn) ###
544 ### Subversion proxy support (mod_dav_svn) ###
542 ### Maps RhodeCode repo groups into SVN paths for Apache ###
545 ### Maps RhodeCode repo groups into SVN paths for Apache ###
543 ############################################################
546 ############################################################
544 ## Enable or disable the config file generation.
547 ## Enable or disable the config file generation.
545 svn.proxy.generate_config = false
548 svn.proxy.generate_config = false
546 ## Generate config file with `SVNListParentPath` set to `On`.
549 ## Generate config file with `SVNListParentPath` set to `On`.
547 svn.proxy.list_parent_path = true
550 svn.proxy.list_parent_path = true
548 ## Set location and file name of generated config file.
551 ## Set location and file name of generated config file.
549 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
552 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
550 ## alternative mod_dav config template. This needs to be a mako template
553 ## alternative mod_dav config template. This needs to be a mako template
551 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
554 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
552 ## Used as a prefix to the `Location` block in the generated config file.
555 ## Used as a prefix to the `Location` block in the generated config file.
553 ## In most cases it should be set to `/`.
556 ## In most cases it should be set to `/`.
554 svn.proxy.location_root = /
557 svn.proxy.location_root = /
555 ## Command to reload the mod dav svn configuration on change.
558 ## Command to reload the mod dav svn configuration on change.
556 ## Example: `/etc/init.d/apache2 reload`
559 ## Example: `/etc/init.d/apache2 reload`
557 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
560 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
558 ## If the timeout expires before the reload command finishes, the command will
561 ## If the timeout expires before the reload command finishes, the command will
559 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
562 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
560 #svn.proxy.reload_timeout = 10
563 #svn.proxy.reload_timeout = 10
561
564
562 ############################################################
565 ############################################################
563 ### SSH Support Settings ###
566 ### SSH Support Settings ###
564 ############################################################
567 ############################################################
565
568
566 ## Defines if a custom authorized_keys file should be created and written on
569 ## Defines if a custom authorized_keys file should be created and written on
567 ## any change user ssh keys. Setting this to false also disables posibility
570 ## any change user ssh keys. Setting this to false also disables posibility
568 ## of adding SSH keys by users from web interface. Super admins can still
571 ## of adding SSH keys by users from web interface. Super admins can still
569 ## manage SSH Keys.
572 ## manage SSH Keys.
570 ssh.generate_authorized_keyfile = false
573 ssh.generate_authorized_keyfile = false
571
574
572 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
575 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
573 # ssh.authorized_keys_ssh_opts =
576 # ssh.authorized_keys_ssh_opts =
574
577
575 ## Path to the authrozied_keys file where the generate entries are placed.
578 ## Path to the authrozied_keys file where the generate entries are placed.
576 ## It is possible to have multiple key files specified in `sshd_config` e.g.
579 ## It is possible to have multiple key files specified in `sshd_config` e.g.
577 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
580 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
578 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
581 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
579
582
580 ## Command to execute the SSH wrapper. The binary is available in the
583 ## Command to execute the SSH wrapper. The binary is available in the
581 ## rhodecode installation directory.
584 ## rhodecode installation directory.
582 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
585 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
583 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
586 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
584
587
585 ## Allow shell when executing the ssh-wrapper command
588 ## Allow shell when executing the ssh-wrapper command
586 ssh.wrapper_cmd_allow_shell = false
589 ssh.wrapper_cmd_allow_shell = false
587
590
588 ## Enables logging, and detailed output send back to the client during SSH
591 ## Enables logging, and detailed output send back to the client during SSH
589 ## operations. Usefull for debugging, shouldn't be used in production.
592 ## operations. Usefull for debugging, shouldn't be used in production.
590 ssh.enable_debug_logging = false
593 ssh.enable_debug_logging = false
591
594
592 ## Paths to binary executable, by default they are the names, but we can
595 ## Paths to binary executable, by default they are the names, but we can
593 ## override them if we want to use a custom one
596 ## override them if we want to use a custom one
594 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
597 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
595 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
598 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
596 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
599 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
597
600
598
601
599 ## Dummy marker to add new entries after.
602 ## Dummy marker to add new entries after.
600 ## Add any custom entries below. Please don't remove.
603 ## Add any custom entries below. Please don't remove.
601 custom.conf = 1
604 custom.conf = 1
602
605
603
606
604 ################################
607 ################################
605 ### LOGGING CONFIGURATION ####
608 ### LOGGING CONFIGURATION ####
606 ################################
609 ################################
607 [loggers]
610 [loggers]
608 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
611 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
609
612
610 [handlers]
613 [handlers]
611 keys = console, console_sql
614 keys = console, console_sql
612
615
613 [formatters]
616 [formatters]
614 keys = generic, color_formatter, color_formatter_sql
617 keys = generic, color_formatter, color_formatter_sql
615
618
616 #############
619 #############
617 ## LOGGERS ##
620 ## LOGGERS ##
618 #############
621 #############
619 [logger_root]
622 [logger_root]
620 level = NOTSET
623 level = NOTSET
621 handlers = console
624 handlers = console
622
625
623 [logger_sqlalchemy]
626 [logger_sqlalchemy]
624 level = INFO
627 level = INFO
625 handlers = console_sql
628 handlers = console_sql
626 qualname = sqlalchemy.engine
629 qualname = sqlalchemy.engine
627 propagate = 0
630 propagate = 0
628
631
629 [logger_beaker]
632 [logger_beaker]
630 level = DEBUG
633 level = DEBUG
631 handlers =
634 handlers =
632 qualname = beaker.container
635 qualname = beaker.container
633 propagate = 1
636 propagate = 1
634
637
635 [logger_rhodecode]
638 [logger_rhodecode]
636 level = DEBUG
639 level = DEBUG
637 handlers =
640 handlers =
638 qualname = rhodecode
641 qualname = rhodecode
639 propagate = 1
642 propagate = 1
640
643
641 [logger_ssh_wrapper]
644 [logger_ssh_wrapper]
642 level = DEBUG
645 level = DEBUG
643 handlers =
646 handlers =
644 qualname = ssh_wrapper
647 qualname = ssh_wrapper
645 propagate = 1
648 propagate = 1
646
649
647 [logger_celery]
650 [logger_celery]
648 level = DEBUG
651 level = DEBUG
649 handlers =
652 handlers =
650 qualname = celery
653 qualname = celery
651
654
652
655
653 ##############
656 ##############
654 ## HANDLERS ##
657 ## HANDLERS ##
655 ##############
658 ##############
656
659
657 [handler_console]
660 [handler_console]
658 class = StreamHandler
661 class = StreamHandler
659 args = (sys.stderr, )
662 args = (sys.stderr, )
660 level = INFO
663 level = INFO
661 formatter = generic
664 formatter = generic
662
665
663 [handler_console_sql]
666 [handler_console_sql]
664 class = StreamHandler
667 class = StreamHandler
665 args = (sys.stderr, )
668 args = (sys.stderr, )
666 level = WARN
669 level = WARN
667 formatter = generic
670 formatter = generic
668
671
669 ################
672 ################
670 ## FORMATTERS ##
673 ## FORMATTERS ##
671 ################
674 ################
672
675
673 [formatter_generic]
676 [formatter_generic]
674 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
677 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
675 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
678 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
676 datefmt = %Y-%m-%d %H:%M:%S
679 datefmt = %Y-%m-%d %H:%M:%S
677
680
678 [formatter_color_formatter]
681 [formatter_color_formatter]
679 class = rhodecode.lib.logging_formatter.ColorFormatter
682 class = rhodecode.lib.logging_formatter.ColorFormatter
680 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
683 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
681 datefmt = %Y-%m-%d %H:%M:%S
684 datefmt = %Y-%m-%d %H:%M:%S
682
685
683 [formatter_color_formatter_sql]
686 [formatter_color_formatter_sql]
684 class = rhodecode.lib.logging_formatter.ColorFormatterSql
687 class = rhodecode.lib.logging_formatter.ColorFormatterSql
685 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
688 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
686 datefmt = %Y-%m-%d %H:%M:%S
689 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,150 +1,151 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2018 RhodeCode GmbH
3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import sys
22 import sys
23 import json
23 import json
24 import logging
24 import logging
25
25
26 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
26 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
27 from rhodecode.lib.vcs.conf import settings as vcs_settings
27 from rhodecode.lib.vcs.conf import settings as vcs_settings
28 from rhodecode.model.scm import ScmModel
28 from rhodecode.model.scm import ScmModel
29
29
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32
32
33 class VcsServer(object):
33 class VcsServer(object):
34 _path = None # set executable path for hg/git/svn binary
34 _path = None # set executable path for hg/git/svn binary
35 backend = None # set in child classes
35 backend = None # set in child classes
36 tunnel = None # subprocess handling tunnel
36 tunnel = None # subprocess handling tunnel
37 write_perms = ['repository.admin', 'repository.write']
37 write_perms = ['repository.admin', 'repository.write']
38 read_perms = ['repository.read', 'repository.admin', 'repository.write']
38 read_perms = ['repository.read', 'repository.admin', 'repository.write']
39
39
40 def __init__(self, user, user_permissions, config, env):
40 def __init__(self, user, user_permissions, config, env):
41 self.user = user
41 self.user = user
42 self.user_permissions = user_permissions
42 self.user_permissions = user_permissions
43 self.config = config
43 self.config = config
44 self.env = env
44 self.env = env
45 self.stdin = sys.stdin
45 self.stdin = sys.stdin
46
46
47 self.repo_name = None
47 self.repo_name = None
48 self.repo_mode = None
48 self.repo_mode = None
49 self.store = ''
49 self.store = ''
50 self.ini_path = ''
50 self.ini_path = ''
51
51
52 def _invalidate_cache(self, repo_name):
52 def _invalidate_cache(self, repo_name):
53 """
53 """
54 Set's cache for this repository for invalidation on next access
54 Set's cache for this repository for invalidation on next access
55
55
56 :param repo_name: full repo name, also a cache key
56 :param repo_name: full repo name, also a cache key
57 """
57 """
58 ScmModel().mark_for_invalidation(repo_name)
58 ScmModel().mark_for_invalidation(repo_name)
59
59
60 def has_write_perm(self):
60 def has_write_perm(self):
61 permission = self.user_permissions.get(self.repo_name)
61 permission = self.user_permissions.get(self.repo_name)
62 if permission in ['repository.write', 'repository.admin']:
62 if permission in ['repository.write', 'repository.admin']:
63 return True
63 return True
64
64
65 return False
65 return False
66
66
67 def _check_permissions(self, action):
67 def _check_permissions(self, action):
68 permission = self.user_permissions.get(self.repo_name)
68 permission = self.user_permissions.get(self.repo_name)
69 log.debug(
69 log.debug(
70 'permission for %s on %s are: %s',
70 'permission for %s on %s are: %s',
71 self.user, self.repo_name, permission)
71 self.user, self.repo_name, permission)
72
72
73 if action == 'pull':
73 if action == 'pull':
74 if permission in self.read_perms:
74 if permission in self.read_perms:
75 log.info(
75 log.info(
76 'READ Permissions for User "%s" detected to repo "%s"!',
76 'READ Permissions for User "%s" detected to repo "%s"!',
77 self.user, self.repo_name)
77 self.user, self.repo_name)
78 return 0
78 return 0
79 else:
79 else:
80 if permission in self.write_perms:
80 if permission in self.write_perms:
81 log.info(
81 log.info(
82 'WRITE+ Permissions for User "%s" detected to repo "%s"!',
82 'WRITE+ Permissions for User "%s" detected to repo "%s"!',
83 self.user, self.repo_name)
83 self.user, self.repo_name)
84 return 0
84 return 0
85
85
86 log.error('Cannot properly fetch or allow user %s permissions. '
86 log.error('Cannot properly fetch or allow user %s permissions. '
87 'Return value is: %s, req action: %s',
87 'Return value is: %s, req action: %s',
88 self.user, permission, action)
88 self.user, permission, action)
89 return -2
89 return -2
90
90
91 def update_environment(self, action, extras=None):
91 def update_environment(self, action, extras=None):
92
92
93 scm_data = {
93 scm_data = {
94 'ip': os.environ['SSH_CLIENT'].split()[0],
94 'ip': os.environ['SSH_CLIENT'].split()[0],
95 'username': self.user.username,
95 'username': self.user.username,
96 'user_id': self.user.user_id,
96 'user_id': self.user.user_id,
97 'action': action,
97 'action': action,
98 'repository': self.repo_name,
98 'repository': self.repo_name,
99 'scm': self.backend,
99 'scm': self.backend,
100 'config': self.ini_path,
100 'config': self.ini_path,
101 'make_lock': None,
101 'make_lock': None,
102 'locked_by': [None, None],
102 'locked_by': [None, None],
103 'server_url': None,
103 'server_url': None,
104 'is_shadow_repo': False,
104 'is_shadow_repo': False,
105 'hooks_module': 'rhodecode.lib.hooks_daemon',
105 'hooks_module': 'rhodecode.lib.hooks_daemon',
106 'hooks': ['push', 'pull'],
106 'hooks': ['push', 'pull'],
107 'SSH': True,
107 'SSH': True,
108 'SSH_PERMISSIONS': self.user_permissions.get(self.repo_name)
108 'SSH_PERMISSIONS': self.user_permissions.get(self.repo_name)
109 }
109 }
110 if extras:
110 if extras:
111 scm_data.update(extras)
111 scm_data.update(extras)
112 os.putenv("RC_SCM_DATA", json.dumps(scm_data))
112 os.putenv("RC_SCM_DATA", json.dumps(scm_data))
113
113
114 def get_root_store(self):
114 def get_root_store(self):
115 root_store = self.store
115 root_store = self.store
116 if not root_store.endswith('/'):
116 if not root_store.endswith('/'):
117 # always append trailing slash
117 # always append trailing slash
118 root_store = root_store + '/'
118 root_store = root_store + '/'
119 return root_store
119 return root_store
120
120
121 def _handle_tunnel(self, extras):
121 def _handle_tunnel(self, extras):
122 # pre-auth
122 # pre-auth
123 action = 'pull'
123 action = 'pull'
124 exit_code = self._check_permissions(action)
124 exit_code = self._check_permissions(action)
125 if exit_code:
125 if exit_code:
126 return exit_code, False
126 return exit_code, False
127
127
128 req = self.env['request']
128 req = self.env['request']
129 server_url = req.host_url + req.script_name
129 server_url = req.host_url + req.script_name
130 extras['server_url'] = server_url
130 extras['server_url'] = server_url
131
131
132 log.debug('Using %s binaries from path %s', self.backend, self._path)
132 log.debug('Using %s binaries from path %s', self.backend, self._path)
133 exit_code = self.tunnel.run(extras)
133 exit_code = self.tunnel.run(extras)
134
134
135 return exit_code, action == "push"
135 return exit_code, action == "push"
136
136
137 def run(self):
137 def run(self):
138 extras = {}
138 extras = {}
139
139
140 callback_daemon, extras = prepare_callback_daemon(
140 callback_daemon, extras = prepare_callback_daemon(
141 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
141 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
142 host=vcs_settings.HOOKS_HOST,
142 use_direct_calls=False)
143 use_direct_calls=False)
143
144
144 with callback_daemon:
145 with callback_daemon:
145 try:
146 try:
146 return self._handle_tunnel(extras)
147 return self._handle_tunnel(extras)
147 finally:
148 finally:
148 log.debug('Running cleanup with cache invalidation')
149 log.debug('Running cleanup with cache invalidation')
149 if self.repo_name:
150 if self.repo_name:
150 self._invalidate_cache(self.repo_name)
151 self._invalidate_cache(self.repo_name)
@@ -1,460 +1,461 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import logging
22 import logging
23 import traceback
23 import traceback
24 import collections
24 import collections
25
25
26 from paste.gzipper import make_gzip_middleware
26 from paste.gzipper import make_gzip_middleware
27 from pyramid.wsgi import wsgiapp
27 from pyramid.wsgi import wsgiapp
28 from pyramid.authorization import ACLAuthorizationPolicy
28 from pyramid.authorization import ACLAuthorizationPolicy
29 from pyramid.config import Configurator
29 from pyramid.config import Configurator
30 from pyramid.settings import asbool, aslist
30 from pyramid.settings import asbool, aslist
31 from pyramid.httpexceptions import (
31 from pyramid.httpexceptions import (
32 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
32 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
33 from pyramid.events import ApplicationCreated
33 from pyramid.events import ApplicationCreated
34 from pyramid.renderers import render_to_response
34 from pyramid.renderers import render_to_response
35
35
36 from rhodecode.model import meta
36 from rhodecode.model import meta
37 from rhodecode.config import patches
37 from rhodecode.config import patches
38 from rhodecode.config import utils as config_utils
38 from rhodecode.config import utils as config_utils
39 from rhodecode.config.environment import load_pyramid_environment
39 from rhodecode.config.environment import load_pyramid_environment
40
40
41 from rhodecode.lib.middleware.vcs import VCSMiddleware
41 from rhodecode.lib.middleware.vcs import VCSMiddleware
42 from rhodecode.lib.request import Request
42 from rhodecode.lib.request import Request
43 from rhodecode.lib.vcs import VCSCommunicationError
43 from rhodecode.lib.vcs import VCSCommunicationError
44 from rhodecode.lib.exceptions import VCSServerUnavailable
44 from rhodecode.lib.exceptions import VCSServerUnavailable
45 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
45 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
46 from rhodecode.lib.middleware.https_fixup import HttpsFixup
46 from rhodecode.lib.middleware.https_fixup import HttpsFixup
47 from rhodecode.lib.celerylib.loader import configure_celery
47 from rhodecode.lib.celerylib.loader import configure_celery
48 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
48 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
49 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
49 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
50 from rhodecode.subscribers import (
50 from rhodecode.subscribers import (
51 scan_repositories_if_enabled, write_js_routes_if_enabled,
51 scan_repositories_if_enabled, write_js_routes_if_enabled,
52 write_metadata_if_needed, inject_app_settings)
52 write_metadata_if_needed, inject_app_settings)
53
53
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 def is_http_error(response):
58 def is_http_error(response):
59 # error which should have traceback
59 # error which should have traceback
60 return response.status_code > 499
60 return response.status_code > 499
61
61
62
62
63 def make_pyramid_app(global_config, **settings):
63 def make_pyramid_app(global_config, **settings):
64 """
64 """
65 Constructs the WSGI application based on Pyramid.
65 Constructs the WSGI application based on Pyramid.
66
66
67 Specials:
67 Specials:
68
68
69 * The application can also be integrated like a plugin via the call to
69 * The application can also be integrated like a plugin via the call to
70 `includeme`. This is accompanied with the other utility functions which
70 `includeme`. This is accompanied with the other utility functions which
71 are called. Changing this should be done with great care to not break
71 are called. Changing this should be done with great care to not break
72 cases when these fragments are assembled from another place.
72 cases when these fragments are assembled from another place.
73
73
74 """
74 """
75
75
76 # Allows to use format style "{ENV_NAME}" placeholders in the configuration. It
76 # Allows to use format style "{ENV_NAME}" placeholders in the configuration. It
77 # will be replaced by the value of the environment variable "NAME" in this case.
77 # will be replaced by the value of the environment variable "NAME" in this case.
78 environ = {
78 environ = {
79 'ENV_{}'.format(key): value for key, value in os.environ.items()}
79 'ENV_{}'.format(key): value for key, value in os.environ.items()}
80
80
81 global_config = _substitute_values(global_config, environ)
81 global_config = _substitute_values(global_config, environ)
82 settings = _substitute_values(settings, environ)
82 settings = _substitute_values(settings, environ)
83
83
84 sanitize_settings_and_apply_defaults(settings)
84 sanitize_settings_and_apply_defaults(settings)
85
85
86 config = Configurator(settings=settings)
86 config = Configurator(settings=settings)
87
87
88 # Apply compatibility patches
88 # Apply compatibility patches
89 patches.inspect_getargspec()
89 patches.inspect_getargspec()
90
90
91 load_pyramid_environment(global_config, settings)
91 load_pyramid_environment(global_config, settings)
92
92
93 # Static file view comes first
93 # Static file view comes first
94 includeme_first(config)
94 includeme_first(config)
95
95
96 includeme(config)
96 includeme(config)
97
97
98 pyramid_app = config.make_wsgi_app()
98 pyramid_app = config.make_wsgi_app()
99 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
99 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
100 pyramid_app.config = config
100 pyramid_app.config = config
101
101
102 config.configure_celery(global_config['__file__'])
102 config.configure_celery(global_config['__file__'])
103 # creating the app uses a connection - return it after we are done
103 # creating the app uses a connection - return it after we are done
104 meta.Session.remove()
104 meta.Session.remove()
105
105
106 log.info('Pyramid app %s created and configured.', pyramid_app)
106 log.info('Pyramid app %s created and configured.', pyramid_app)
107 return pyramid_app
107 return pyramid_app
108
108
109
109
110 def not_found_view(request):
110 def not_found_view(request):
111 """
111 """
112 This creates the view which should be registered as not-found-view to
112 This creates the view which should be registered as not-found-view to
113 pyramid.
113 pyramid.
114 """
114 """
115
115
116 if not getattr(request, 'vcs_call', None):
116 if not getattr(request, 'vcs_call', None):
117 # handle like regular case with our error_handler
117 # handle like regular case with our error_handler
118 return error_handler(HTTPNotFound(), request)
118 return error_handler(HTTPNotFound(), request)
119
119
120 # handle not found view as a vcs call
120 # handle not found view as a vcs call
121 settings = request.registry.settings
121 settings = request.registry.settings
122 ae_client = getattr(request, 'ae_client', None)
122 ae_client = getattr(request, 'ae_client', None)
123 vcs_app = VCSMiddleware(
123 vcs_app = VCSMiddleware(
124 HTTPNotFound(), request.registry, settings,
124 HTTPNotFound(), request.registry, settings,
125 appenlight_client=ae_client)
125 appenlight_client=ae_client)
126
126
127 return wsgiapp(vcs_app)(None, request)
127 return wsgiapp(vcs_app)(None, request)
128
128
129
129
130 def error_handler(exception, request):
130 def error_handler(exception, request):
131 import rhodecode
131 import rhodecode
132 from rhodecode.lib import helpers
132 from rhodecode.lib import helpers
133
133
134 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
134 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
135
135
136 base_response = HTTPInternalServerError()
136 base_response = HTTPInternalServerError()
137 # prefer original exception for the response since it may have headers set
137 # prefer original exception for the response since it may have headers set
138 if isinstance(exception, HTTPException):
138 if isinstance(exception, HTTPException):
139 base_response = exception
139 base_response = exception
140 elif isinstance(exception, VCSCommunicationError):
140 elif isinstance(exception, VCSCommunicationError):
141 base_response = VCSServerUnavailable()
141 base_response = VCSServerUnavailable()
142
142
143 if is_http_error(base_response):
143 if is_http_error(base_response):
144 log.exception(
144 log.exception(
145 'error occurred handling this request for path: %s', request.path)
145 'error occurred handling this request for path: %s', request.path)
146
146
147 error_explanation = base_response.explanation or str(base_response)
147 error_explanation = base_response.explanation or str(base_response)
148 if base_response.status_code == 404:
148 if base_response.status_code == 404:
149 error_explanation += " Or you don't have permission to access it."
149 error_explanation += " Or you don't have permission to access it."
150 c = AttributeDict()
150 c = AttributeDict()
151 c.error_message = base_response.status
151 c.error_message = base_response.status
152 c.error_explanation = error_explanation
152 c.error_explanation = error_explanation
153 c.visual = AttributeDict()
153 c.visual = AttributeDict()
154
154
155 c.visual.rhodecode_support_url = (
155 c.visual.rhodecode_support_url = (
156 request.registry.settings.get('rhodecode_support_url') or
156 request.registry.settings.get('rhodecode_support_url') or
157 request.route_url('rhodecode_support')
157 request.route_url('rhodecode_support')
158 )
158 )
159 c.redirect_time = 0
159 c.redirect_time = 0
160 c.rhodecode_name = rhodecode_title
160 c.rhodecode_name = rhodecode_title
161 if not c.rhodecode_name:
161 if not c.rhodecode_name:
162 c.rhodecode_name = 'Rhodecode'
162 c.rhodecode_name = 'Rhodecode'
163
163
164 c.causes = []
164 c.causes = []
165 if is_http_error(base_response):
165 if is_http_error(base_response):
166 c.causes.append('Server is overloaded.')
166 c.causes.append('Server is overloaded.')
167 c.causes.append('Server database connection is lost.')
167 c.causes.append('Server database connection is lost.')
168 c.causes.append('Server expected unhandled error.')
168 c.causes.append('Server expected unhandled error.')
169
169
170 if hasattr(base_response, 'causes'):
170 if hasattr(base_response, 'causes'):
171 c.causes = base_response.causes
171 c.causes = base_response.causes
172
172
173 c.messages = helpers.flash.pop_messages(request=request)
173 c.messages = helpers.flash.pop_messages(request=request)
174 c.traceback = traceback.format_exc()
174 c.traceback = traceback.format_exc()
175 response = render_to_response(
175 response = render_to_response(
176 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
176 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
177 response=base_response)
177 response=base_response)
178
178
179 return response
179 return response
180
180
181
181
182 def includeme_first(config):
182 def includeme_first(config):
183 # redirect automatic browser favicon.ico requests to correct place
183 # redirect automatic browser favicon.ico requests to correct place
184 def favicon_redirect(context, request):
184 def favicon_redirect(context, request):
185 return HTTPFound(
185 return HTTPFound(
186 request.static_path('rhodecode:public/images/favicon.ico'))
186 request.static_path('rhodecode:public/images/favicon.ico'))
187
187
188 config.add_view(favicon_redirect, route_name='favicon')
188 config.add_view(favicon_redirect, route_name='favicon')
189 config.add_route('favicon', '/favicon.ico')
189 config.add_route('favicon', '/favicon.ico')
190
190
191 def robots_redirect(context, request):
191 def robots_redirect(context, request):
192 return HTTPFound(
192 return HTTPFound(
193 request.static_path('rhodecode:public/robots.txt'))
193 request.static_path('rhodecode:public/robots.txt'))
194
194
195 config.add_view(robots_redirect, route_name='robots')
195 config.add_view(robots_redirect, route_name='robots')
196 config.add_route('robots', '/robots.txt')
196 config.add_route('robots', '/robots.txt')
197
197
198 config.add_static_view(
198 config.add_static_view(
199 '_static/deform', 'deform:static')
199 '_static/deform', 'deform:static')
200 config.add_static_view(
200 config.add_static_view(
201 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
201 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
202
202
203
203
204 def includeme(config):
204 def includeme(config):
205 settings = config.registry.settings
205 settings = config.registry.settings
206 config.set_request_factory(Request)
206 config.set_request_factory(Request)
207
207
208 # plugin information
208 # plugin information
209 config.registry.rhodecode_plugins = collections.OrderedDict()
209 config.registry.rhodecode_plugins = collections.OrderedDict()
210
210
211 config.add_directive(
211 config.add_directive(
212 'register_rhodecode_plugin', register_rhodecode_plugin)
212 'register_rhodecode_plugin', register_rhodecode_plugin)
213
213
214 config.add_directive('configure_celery', configure_celery)
214 config.add_directive('configure_celery', configure_celery)
215
215
216 if asbool(settings.get('appenlight', 'false')):
216 if asbool(settings.get('appenlight', 'false')):
217 config.include('appenlight_client.ext.pyramid_tween')
217 config.include('appenlight_client.ext.pyramid_tween')
218
218
219 # Includes which are required. The application would fail without them.
219 # Includes which are required. The application would fail without them.
220 config.include('pyramid_mako')
220 config.include('pyramid_mako')
221 config.include('pyramid_beaker')
221 config.include('pyramid_beaker')
222 config.include('rhodecode.lib.caches')
222 config.include('rhodecode.lib.caches')
223
223
224 config.include('rhodecode.authentication')
224 config.include('rhodecode.authentication')
225 config.include('rhodecode.integrations')
225 config.include('rhodecode.integrations')
226
226
227 # apps
227 # apps
228 config.include('rhodecode.apps._base')
228 config.include('rhodecode.apps._base')
229 config.include('rhodecode.apps.ops')
229 config.include('rhodecode.apps.ops')
230
230
231 config.include('rhodecode.apps.admin')
231 config.include('rhodecode.apps.admin')
232 config.include('rhodecode.apps.channelstream')
232 config.include('rhodecode.apps.channelstream')
233 config.include('rhodecode.apps.login')
233 config.include('rhodecode.apps.login')
234 config.include('rhodecode.apps.home')
234 config.include('rhodecode.apps.home')
235 config.include('rhodecode.apps.journal')
235 config.include('rhodecode.apps.journal')
236 config.include('rhodecode.apps.repository')
236 config.include('rhodecode.apps.repository')
237 config.include('rhodecode.apps.repo_group')
237 config.include('rhodecode.apps.repo_group')
238 config.include('rhodecode.apps.user_group')
238 config.include('rhodecode.apps.user_group')
239 config.include('rhodecode.apps.search')
239 config.include('rhodecode.apps.search')
240 config.include('rhodecode.apps.user_profile')
240 config.include('rhodecode.apps.user_profile')
241 config.include('rhodecode.apps.user_group_profile')
241 config.include('rhodecode.apps.user_group_profile')
242 config.include('rhodecode.apps.my_account')
242 config.include('rhodecode.apps.my_account')
243 config.include('rhodecode.apps.svn_support')
243 config.include('rhodecode.apps.svn_support')
244 config.include('rhodecode.apps.ssh_support')
244 config.include('rhodecode.apps.ssh_support')
245 config.include('rhodecode.apps.gist')
245 config.include('rhodecode.apps.gist')
246
246
247 config.include('rhodecode.apps.debug_style')
247 config.include('rhodecode.apps.debug_style')
248 config.include('rhodecode.tweens')
248 config.include('rhodecode.tweens')
249 config.include('rhodecode.api')
249 config.include('rhodecode.api')
250
250
251 config.add_route(
251 config.add_route(
252 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
252 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
253
253
254 config.add_translation_dirs('rhodecode:i18n/')
254 config.add_translation_dirs('rhodecode:i18n/')
255 settings['default_locale_name'] = settings.get('lang', 'en')
255 settings['default_locale_name'] = settings.get('lang', 'en')
256
256
257 # Add subscribers.
257 # Add subscribers.
258 config.add_subscriber(inject_app_settings, ApplicationCreated)
258 config.add_subscriber(inject_app_settings, ApplicationCreated)
259 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
259 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
260 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
260 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
261 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
261 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
262
262
263 # events
263 # events
264 # TODO(marcink): this should be done when pyramid migration is finished
264 # TODO(marcink): this should be done when pyramid migration is finished
265 # config.add_subscriber(
265 # config.add_subscriber(
266 # 'rhodecode.integrations.integrations_event_handler',
266 # 'rhodecode.integrations.integrations_event_handler',
267 # 'rhodecode.events.RhodecodeEvent')
267 # 'rhodecode.events.RhodecodeEvent')
268
268
269 # request custom methods
269 # request custom methods
270 config.add_request_method(
270 config.add_request_method(
271 'rhodecode.lib.partial_renderer.get_partial_renderer',
271 'rhodecode.lib.partial_renderer.get_partial_renderer',
272 'get_partial_renderer')
272 'get_partial_renderer')
273
273
274 # Set the authorization policy.
274 # Set the authorization policy.
275 authz_policy = ACLAuthorizationPolicy()
275 authz_policy = ACLAuthorizationPolicy()
276 config.set_authorization_policy(authz_policy)
276 config.set_authorization_policy(authz_policy)
277
277
278 # Set the default renderer for HTML templates to mako.
278 # Set the default renderer for HTML templates to mako.
279 config.add_mako_renderer('.html')
279 config.add_mako_renderer('.html')
280
280
281 config.add_renderer(
281 config.add_renderer(
282 name='json_ext',
282 name='json_ext',
283 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
283 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
284
284
285 # include RhodeCode plugins
285 # include RhodeCode plugins
286 includes = aslist(settings.get('rhodecode.includes', []))
286 includes = aslist(settings.get('rhodecode.includes', []))
287 for inc in includes:
287 for inc in includes:
288 config.include(inc)
288 config.include(inc)
289
289
290 # custom not found view, if our pyramid app doesn't know how to handle
290 # custom not found view, if our pyramid app doesn't know how to handle
291 # the request pass it to potential VCS handling ap
291 # the request pass it to potential VCS handling ap
292 config.add_notfound_view(not_found_view)
292 config.add_notfound_view(not_found_view)
293 if not settings.get('debugtoolbar.enabled', False):
293 if not settings.get('debugtoolbar.enabled', False):
294 # disabled debugtoolbar handle all exceptions via the error_handlers
294 # disabled debugtoolbar handle all exceptions via the error_handlers
295 config.add_view(error_handler, context=Exception)
295 config.add_view(error_handler, context=Exception)
296
296
297 # all errors including 403/404/50X
297 # all errors including 403/404/50X
298 config.add_view(error_handler, context=HTTPError)
298 config.add_view(error_handler, context=HTTPError)
299
299
300
300
301 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
301 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
302 """
302 """
303 Apply outer WSGI middlewares around the application.
303 Apply outer WSGI middlewares around the application.
304 """
304 """
305 settings = config.registry.settings
305 settings = config.registry.settings
306
306
307 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
307 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
308 pyramid_app = HttpsFixup(pyramid_app, settings)
308 pyramid_app = HttpsFixup(pyramid_app, settings)
309
309
310 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
310 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
311 pyramid_app, settings)
311 pyramid_app, settings)
312 config.registry.ae_client = _ae_client
312 config.registry.ae_client = _ae_client
313
313
314 if settings['gzip_responses']:
314 if settings['gzip_responses']:
315 pyramid_app = make_gzip_middleware(
315 pyramid_app = make_gzip_middleware(
316 pyramid_app, settings, compress_level=1)
316 pyramid_app, settings, compress_level=1)
317
317
318 # this should be the outer most middleware in the wsgi stack since
318 # this should be the outer most middleware in the wsgi stack since
319 # middleware like Routes make database calls
319 # middleware like Routes make database calls
320 def pyramid_app_with_cleanup(environ, start_response):
320 def pyramid_app_with_cleanup(environ, start_response):
321 try:
321 try:
322 return pyramid_app(environ, start_response)
322 return pyramid_app(environ, start_response)
323 finally:
323 finally:
324 # Dispose current database session and rollback uncommitted
324 # Dispose current database session and rollback uncommitted
325 # transactions.
325 # transactions.
326 meta.Session.remove()
326 meta.Session.remove()
327
327
328 # In a single threaded mode server, on non sqlite db we should have
328 # In a single threaded mode server, on non sqlite db we should have
329 # '0 Current Checked out connections' at the end of a request,
329 # '0 Current Checked out connections' at the end of a request,
330 # if not, then something, somewhere is leaving a connection open
330 # if not, then something, somewhere is leaving a connection open
331 pool = meta.Base.metadata.bind.engine.pool
331 pool = meta.Base.metadata.bind.engine.pool
332 log.debug('sa pool status: %s', pool.status())
332 log.debug('sa pool status: %s', pool.status())
333
333
334 return pyramid_app_with_cleanup
334 return pyramid_app_with_cleanup
335
335
336
336
337 def sanitize_settings_and_apply_defaults(settings):
337 def sanitize_settings_and_apply_defaults(settings):
338 """
338 """
339 Applies settings defaults and does all type conversion.
339 Applies settings defaults and does all type conversion.
340
340
341 We would move all settings parsing and preparation into this place, so that
341 We would move all settings parsing and preparation into this place, so that
342 we have only one place left which deals with this part. The remaining parts
342 we have only one place left which deals with this part. The remaining parts
343 of the application would start to rely fully on well prepared settings.
343 of the application would start to rely fully on well prepared settings.
344
344
345 This piece would later be split up per topic to avoid a big fat monster
345 This piece would later be split up per topic to avoid a big fat monster
346 function.
346 function.
347 """
347 """
348
348
349 settings.setdefault('rhodecode.edition', 'Community Edition')
349 settings.setdefault('rhodecode.edition', 'Community Edition')
350
350
351 if 'mako.default_filters' not in settings:
351 if 'mako.default_filters' not in settings:
352 # set custom default filters if we don't have it defined
352 # set custom default filters if we don't have it defined
353 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
353 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
354 settings['mako.default_filters'] = 'h_filter'
354 settings['mako.default_filters'] = 'h_filter'
355
355
356 if 'mako.directories' not in settings:
356 if 'mako.directories' not in settings:
357 mako_directories = settings.setdefault('mako.directories', [
357 mako_directories = settings.setdefault('mako.directories', [
358 # Base templates of the original application
358 # Base templates of the original application
359 'rhodecode:templates',
359 'rhodecode:templates',
360 ])
360 ])
361 log.debug(
361 log.debug(
362 "Using the following Mako template directories: %s",
362 "Using the following Mako template directories: %s",
363 mako_directories)
363 mako_directories)
364
364
365 # Default includes, possible to change as a user
365 # Default includes, possible to change as a user
366 pyramid_includes = settings.setdefault('pyramid.includes', [
366 pyramid_includes = settings.setdefault('pyramid.includes', [
367 'rhodecode.lib.middleware.request_wrapper',
367 'rhodecode.lib.middleware.request_wrapper',
368 ])
368 ])
369 log.debug(
369 log.debug(
370 "Using the following pyramid.includes: %s",
370 "Using the following pyramid.includes: %s",
371 pyramid_includes)
371 pyramid_includes)
372
372
373 # TODO: johbo: Re-think this, usually the call to config.include
373 # TODO: johbo: Re-think this, usually the call to config.include
374 # should allow to pass in a prefix.
374 # should allow to pass in a prefix.
375 settings.setdefault('rhodecode.api.url', '/_admin/api')
375 settings.setdefault('rhodecode.api.url', '/_admin/api')
376
376
377 # Sanitize generic settings.
377 # Sanitize generic settings.
378 _list_setting(settings, 'default_encoding', 'UTF-8')
378 _list_setting(settings, 'default_encoding', 'UTF-8')
379 _bool_setting(settings, 'is_test', 'false')
379 _bool_setting(settings, 'is_test', 'false')
380 _bool_setting(settings, 'gzip_responses', 'false')
380 _bool_setting(settings, 'gzip_responses', 'false')
381
381
382 # Call split out functions that sanitize settings for each topic.
382 # Call split out functions that sanitize settings for each topic.
383 _sanitize_appenlight_settings(settings)
383 _sanitize_appenlight_settings(settings)
384 _sanitize_vcs_settings(settings)
384 _sanitize_vcs_settings(settings)
385
385
386 # configure instance id
386 # configure instance id
387 config_utils.set_instance_id(settings)
387 config_utils.set_instance_id(settings)
388
388
389 return settings
389 return settings
390
390
391
391
392 def _sanitize_appenlight_settings(settings):
392 def _sanitize_appenlight_settings(settings):
393 _bool_setting(settings, 'appenlight', 'false')
393 _bool_setting(settings, 'appenlight', 'false')
394
394
395
395
396 def _sanitize_vcs_settings(settings):
396 def _sanitize_vcs_settings(settings):
397 """
397 """
398 Applies settings defaults and does type conversion for all VCS related
398 Applies settings defaults and does type conversion for all VCS related
399 settings.
399 settings.
400 """
400 """
401 _string_setting(settings, 'vcs.svn.compatible_version', '')
401 _string_setting(settings, 'vcs.svn.compatible_version', '')
402 _string_setting(settings, 'git_rev_filter', '--all')
402 _string_setting(settings, 'git_rev_filter', '--all')
403 _string_setting(settings, 'vcs.hooks.protocol', 'http')
403 _string_setting(settings, 'vcs.hooks.protocol', 'http')
404 _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
404 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
405 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
405 _string_setting(settings, 'vcs.server', '')
406 _string_setting(settings, 'vcs.server', '')
406 _string_setting(settings, 'vcs.server.log_level', 'debug')
407 _string_setting(settings, 'vcs.server.log_level', 'debug')
407 _string_setting(settings, 'vcs.server.protocol', 'http')
408 _string_setting(settings, 'vcs.server.protocol', 'http')
408 _bool_setting(settings, 'startup.import_repos', 'false')
409 _bool_setting(settings, 'startup.import_repos', 'false')
409 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
410 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
410 _bool_setting(settings, 'vcs.server.enable', 'true')
411 _bool_setting(settings, 'vcs.server.enable', 'true')
411 _bool_setting(settings, 'vcs.start_server', 'false')
412 _bool_setting(settings, 'vcs.start_server', 'false')
412 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
413 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
413 _int_setting(settings, 'vcs.connection_timeout', 3600)
414 _int_setting(settings, 'vcs.connection_timeout', 3600)
414
415
415 # Support legacy values of vcs.scm_app_implementation. Legacy
416 # Support legacy values of vcs.scm_app_implementation. Legacy
416 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
417 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
417 # which is now mapped to 'http'.
418 # which is now mapped to 'http'.
418 scm_app_impl = settings['vcs.scm_app_implementation']
419 scm_app_impl = settings['vcs.scm_app_implementation']
419 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
420 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
420 settings['vcs.scm_app_implementation'] = 'http'
421 settings['vcs.scm_app_implementation'] = 'http'
421
422
422
423
423 def _int_setting(settings, name, default):
424 def _int_setting(settings, name, default):
424 settings[name] = int(settings.get(name, default))
425 settings[name] = int(settings.get(name, default))
425
426
426
427
427 def _bool_setting(settings, name, default):
428 def _bool_setting(settings, name, default):
428 input_val = settings.get(name, default)
429 input_val = settings.get(name, default)
429 if isinstance(input_val, unicode):
430 if isinstance(input_val, unicode):
430 input_val = input_val.encode('utf8')
431 input_val = input_val.encode('utf8')
431 settings[name] = asbool(input_val)
432 settings[name] = asbool(input_val)
432
433
433
434
434 def _list_setting(settings, name, default):
435 def _list_setting(settings, name, default):
435 raw_value = settings.get(name, default)
436 raw_value = settings.get(name, default)
436
437
437 old_separator = ','
438 old_separator = ','
438 if old_separator in raw_value:
439 if old_separator in raw_value:
439 # If we get a comma separated list, pass it to our own function.
440 # If we get a comma separated list, pass it to our own function.
440 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
441 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
441 else:
442 else:
442 # Otherwise we assume it uses pyramids space/newline separation.
443 # Otherwise we assume it uses pyramids space/newline separation.
443 settings[name] = aslist(raw_value)
444 settings[name] = aslist(raw_value)
444
445
445
446
446 def _string_setting(settings, name, default, lower=True):
447 def _string_setting(settings, name, default, lower=True):
447 value = settings.get(name, default)
448 value = settings.get(name, default)
448 if lower:
449 if lower:
449 value = value.lower()
450 value = value.lower()
450 settings[name] = value
451 settings[name] = value
451
452
452
453
453 def _substitute_values(mapping, substitutions):
454 def _substitute_values(mapping, substitutions):
454 result = {
455 result = {
455 # Note: Cannot use regular replacements, since they would clash
456 # Note: Cannot use regular replacements, since they would clash
456 # with the implementation of ConfigParser. Using "format" instead.
457 # with the implementation of ConfigParser. Using "format" instead.
457 key: value.format(**substitutions)
458 key: value.format(**substitutions)
458 for key, value in mapping.items()
459 for key, value in mapping.items()
459 }
460 }
460 return result
461 return result
@@ -1,87 +1,89 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import shlex
22 import shlex
23 import platform
23 import platform
24
24
25 from rhodecode.model import init_model
25 from rhodecode.model import init_model
26
26
27
27
28
29 def configure_vcs(config):
28 def configure_vcs(config):
30 """
29 """
31 Patch VCS config with some RhodeCode specific stuff
30 Patch VCS config with some RhodeCode specific stuff
32 """
31 """
33 from rhodecode.lib.vcs import conf
32 from rhodecode.lib.vcs import conf
33 import rhodecode.lib.vcs.conf.settings
34
34 conf.settings.BACKENDS = {
35 conf.settings.BACKENDS = {
35 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
36 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
36 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
37 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
37 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
38 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
38 }
39 }
39
40
40 conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol']
41 conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol']
42 conf.settings.HOOKS_HOST = config['vcs.hooks.host']
41 conf.settings.HOOKS_DIRECT_CALLS = config['vcs.hooks.direct_calls']
43 conf.settings.HOOKS_DIRECT_CALLS = config['vcs.hooks.direct_calls']
42 conf.settings.GIT_REV_FILTER = shlex.split(config['git_rev_filter'])
44 conf.settings.GIT_REV_FILTER = shlex.split(config['git_rev_filter'])
43 conf.settings.DEFAULT_ENCODINGS = config['default_encoding']
45 conf.settings.DEFAULT_ENCODINGS = config['default_encoding']
44 conf.settings.ALIASES[:] = config['vcs.backends']
46 conf.settings.ALIASES[:] = config['vcs.backends']
45 conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version']
47 conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version']
46
48
47
49
48 def initialize_database(config):
50 def initialize_database(config):
49 from rhodecode.lib.utils2 import engine_from_config, get_encryption_key
51 from rhodecode.lib.utils2 import engine_from_config, get_encryption_key
50 engine = engine_from_config(config, 'sqlalchemy.db1.')
52 engine = engine_from_config(config, 'sqlalchemy.db1.')
51 init_model(engine, encryption_key=get_encryption_key(config))
53 init_model(engine, encryption_key=get_encryption_key(config))
52
54
53
55
54 def initialize_test_environment(settings, test_env=None):
56 def initialize_test_environment(settings, test_env=None):
55 if test_env is None:
57 if test_env is None:
56 test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
58 test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
57
59
58 from rhodecode.lib.utils import (
60 from rhodecode.lib.utils import (
59 create_test_directory, create_test_database, create_test_repositories,
61 create_test_directory, create_test_database, create_test_repositories,
60 create_test_index)
62 create_test_index)
61 from rhodecode.tests import TESTS_TMP_PATH
63 from rhodecode.tests import TESTS_TMP_PATH
62 from rhodecode.lib.vcs.backends.hg import largefiles_store
64 from rhodecode.lib.vcs.backends.hg import largefiles_store
63 from rhodecode.lib.vcs.backends.git import lfs_store
65 from rhodecode.lib.vcs.backends.git import lfs_store
64
66
65 # test repos
67 # test repos
66 if test_env:
68 if test_env:
67 create_test_directory(TESTS_TMP_PATH)
69 create_test_directory(TESTS_TMP_PATH)
68 # large object stores
70 # large object stores
69 create_test_directory(largefiles_store(TESTS_TMP_PATH))
71 create_test_directory(largefiles_store(TESTS_TMP_PATH))
70 create_test_directory(lfs_store(TESTS_TMP_PATH))
72 create_test_directory(lfs_store(TESTS_TMP_PATH))
71
73
72 create_test_database(TESTS_TMP_PATH, settings)
74 create_test_database(TESTS_TMP_PATH, settings)
73 create_test_repositories(TESTS_TMP_PATH, settings)
75 create_test_repositories(TESTS_TMP_PATH, settings)
74 create_test_index(TESTS_TMP_PATH, settings)
76 create_test_index(TESTS_TMP_PATH, settings)
75
77
76
78
77 def get_vcs_server_protocol(config):
79 def get_vcs_server_protocol(config):
78 return config['vcs.server.protocol']
80 return config['vcs.server.protocol']
79
81
80
82
81 def set_instance_id(config):
83 def set_instance_id(config):
82 """ Sets a dynamic generated config['instance_id'] if missing or '*' """
84 """ Sets a dynamic generated config['instance_id'] if missing or '*' """
83
85
84 config['instance_id'] = config.get('instance_id') or ''
86 config['instance_id'] = config.get('instance_id') or ''
85 if config['instance_id'] == '*' or not config['instance_id']:
87 if config['instance_id'] == '*' or not config['instance_id']:
86 _platform_id = platform.uname()[1] or 'instance'
88 _platform_id = platform.uname()[1] or 'instance'
87 config['instance_id'] = '%s-%s' % (_platform_id, os.getpid())
89 config['instance_id'] = '%s-%s' % (_platform_id, os.getpid())
@@ -1,313 +1,313 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import time
22 import time
23 import logging
23 import logging
24 import tempfile
24 import tempfile
25 import traceback
25 import traceback
26 import threading
26 import threading
27
27
28 from BaseHTTPServer import BaseHTTPRequestHandler
28 from BaseHTTPServer import BaseHTTPRequestHandler
29 from SocketServer import TCPServer
29 from SocketServer import TCPServer
30
30
31 import rhodecode
31 import rhodecode
32 from rhodecode.model import meta
32 from rhodecode.model import meta
33 from rhodecode.lib.base import bootstrap_request, bootstrap_config
33 from rhodecode.lib.base import bootstrap_request, bootstrap_config
34 from rhodecode.lib import hooks_base
34 from rhodecode.lib import hooks_base
35 from rhodecode.lib.utils2 import AttributeDict
35 from rhodecode.lib.utils2 import AttributeDict
36 from rhodecode.lib.ext_json import json
36 from rhodecode.lib.ext_json import json
37
37
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 class HooksHttpHandler(BaseHTTPRequestHandler):
42 class HooksHttpHandler(BaseHTTPRequestHandler):
43
43
44 def do_POST(self):
44 def do_POST(self):
45 method, extras = self._read_request()
45 method, extras = self._read_request()
46 txn_id = getattr(self.server, 'txn_id', None)
46 txn_id = getattr(self.server, 'txn_id', None)
47 if txn_id:
47 if txn_id:
48 from rhodecode.lib.caches import compute_key_from_params
48 from rhodecode.lib.caches import compute_key_from_params
49 log.debug('Computing TXN_ID based on `%s`:`%s`',
49 log.debug('Computing TXN_ID based on `%s`:`%s`',
50 extras['repository'], extras['txn_id'])
50 extras['repository'], extras['txn_id'])
51 computed_txn_id = compute_key_from_params(
51 computed_txn_id = compute_key_from_params(
52 extras['repository'], extras['txn_id'])
52 extras['repository'], extras['txn_id'])
53 if txn_id != computed_txn_id:
53 if txn_id != computed_txn_id:
54 raise Exception(
54 raise Exception(
55 'TXN ID fail: expected {} got {} instead'.format(
55 'TXN ID fail: expected {} got {} instead'.format(
56 txn_id, computed_txn_id))
56 txn_id, computed_txn_id))
57
57
58 try:
58 try:
59 result = self._call_hook(method, extras)
59 result = self._call_hook(method, extras)
60 except Exception as e:
60 except Exception as e:
61 exc_tb = traceback.format_exc()
61 exc_tb = traceback.format_exc()
62 result = {
62 result = {
63 'exception': e.__class__.__name__,
63 'exception': e.__class__.__name__,
64 'exception_traceback': exc_tb,
64 'exception_traceback': exc_tb,
65 'exception_args': e.args
65 'exception_args': e.args
66 }
66 }
67 self._write_response(result)
67 self._write_response(result)
68
68
69 def _read_request(self):
69 def _read_request(self):
70 length = int(self.headers['Content-Length'])
70 length = int(self.headers['Content-Length'])
71 body = self.rfile.read(length).decode('utf-8')
71 body = self.rfile.read(length).decode('utf-8')
72 data = json.loads(body)
72 data = json.loads(body)
73 return data['method'], data['extras']
73 return data['method'], data['extras']
74
74
75 def _write_response(self, result):
75 def _write_response(self, result):
76 self.send_response(200)
76 self.send_response(200)
77 self.send_header("Content-type", "text/json")
77 self.send_header("Content-type", "text/json")
78 self.end_headers()
78 self.end_headers()
79 self.wfile.write(json.dumps(result))
79 self.wfile.write(json.dumps(result))
80
80
81 def _call_hook(self, method, extras):
81 def _call_hook(self, method, extras):
82 hooks = Hooks()
82 hooks = Hooks()
83 try:
83 try:
84 result = getattr(hooks, method)(extras)
84 result = getattr(hooks, method)(extras)
85 finally:
85 finally:
86 meta.Session.remove()
86 meta.Session.remove()
87 return result
87 return result
88
88
89 def log_message(self, format, *args):
89 def log_message(self, format, *args):
90 """
90 """
91 This is an overridden method of BaseHTTPRequestHandler which logs using
91 This is an overridden method of BaseHTTPRequestHandler which logs using
92 logging library instead of writing directly to stderr.
92 logging library instead of writing directly to stderr.
93 """
93 """
94
94
95 message = format % args
95 message = format % args
96
96
97 log.debug(
97 log.debug(
98 "%s - - [%s] %s", self.client_address[0],
98 "%s - - [%s] %s", self.client_address[0],
99 self.log_date_time_string(), message)
99 self.log_date_time_string(), message)
100
100
101
101
102 class DummyHooksCallbackDaemon(object):
102 class DummyHooksCallbackDaemon(object):
103 hooks_uri = ''
103 hooks_uri = ''
104
104
105 def __init__(self):
105 def __init__(self):
106 self.hooks_module = Hooks.__module__
106 self.hooks_module = Hooks.__module__
107
107
108 def __enter__(self):
108 def __enter__(self):
109 log.debug('Running dummy hooks callback daemon')
109 log.debug('Running dummy hooks callback daemon')
110 return self
110 return self
111
111
112 def __exit__(self, exc_type, exc_val, exc_tb):
112 def __exit__(self, exc_type, exc_val, exc_tb):
113 log.debug('Exiting dummy hooks callback daemon')
113 log.debug('Exiting dummy hooks callback daemon')
114
114
115
115
116 class ThreadedHookCallbackDaemon(object):
116 class ThreadedHookCallbackDaemon(object):
117
117
118 _callback_thread = None
118 _callback_thread = None
119 _daemon = None
119 _daemon = None
120 _done = False
120 _done = False
121
121
122 def __init__(self, txn_id=None, port=None):
122 def __init__(self, txn_id=None, host=None, port=None):
123 self._prepare(txn_id=txn_id, port=port)
123 self._prepare(txn_id=txn_id, host=None, port=port)
124
124
125 def __enter__(self):
125 def __enter__(self):
126 self._run()
126 self._run()
127 return self
127 return self
128
128
129 def __exit__(self, exc_type, exc_val, exc_tb):
129 def __exit__(self, exc_type, exc_val, exc_tb):
130 log.debug('Callback daemon exiting now...')
130 log.debug('Callback daemon exiting now...')
131 self._stop()
131 self._stop()
132
132
133 def _prepare(self, txn_id=None, port=None):
133 def _prepare(self, txn_id=None, host=None, port=None):
134 raise NotImplementedError()
134 raise NotImplementedError()
135
135
136 def _run(self):
136 def _run(self):
137 raise NotImplementedError()
137 raise NotImplementedError()
138
138
139 def _stop(self):
139 def _stop(self):
140 raise NotImplementedError()
140 raise NotImplementedError()
141
141
142
142
143 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
143 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
144 """
144 """
145 Context manager which will run a callback daemon in a background thread.
145 Context manager which will run a callback daemon in a background thread.
146 """
146 """
147
147
148 hooks_uri = None
148 hooks_uri = None
149
149
150 IP_ADDRESS = '127.0.0.1'
151
152 # From Python docs: Polling reduces our responsiveness to a shutdown
150 # From Python docs: Polling reduces our responsiveness to a shutdown
153 # request and wastes cpu at all other times.
151 # request and wastes cpu at all other times.
154 POLL_INTERVAL = 0.01
152 POLL_INTERVAL = 0.01
155
153
156 def _prepare(self, txn_id=None, port=None):
154 def _prepare(self, txn_id=None, host=None, port=None):
155 host = host or '127.0.0.1'
157 self._done = False
156 self._done = False
158 self._daemon = TCPServer((self.IP_ADDRESS, port or 0), HooksHttpHandler)
157 self._daemon = TCPServer((host, port or 0), HooksHttpHandler)
159 _, port = self._daemon.server_address
158 _, port = self._daemon.server_address
160 self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, port)
159 self.hooks_uri = '{}:{}'.format(host, port)
161 self.txn_id = txn_id
160 self.txn_id = txn_id
162 # inject transaction_id for later verification
161 # inject transaction_id for later verification
163 self._daemon.txn_id = self.txn_id
162 self._daemon.txn_id = self.txn_id
164
163
165 log.debug(
164 log.debug(
166 "Preparing HTTP callback daemon at `%s` and registering hook object",
165 "Preparing HTTP callback daemon at `%s` and registering hook object",
167 self.hooks_uri)
166 self.hooks_uri)
168
167
169 def _run(self):
168 def _run(self):
170 log.debug("Running event loop of callback daemon in background thread")
169 log.debug("Running event loop of callback daemon in background thread")
171 callback_thread = threading.Thread(
170 callback_thread = threading.Thread(
172 target=self._daemon.serve_forever,
171 target=self._daemon.serve_forever,
173 kwargs={'poll_interval': self.POLL_INTERVAL})
172 kwargs={'poll_interval': self.POLL_INTERVAL})
174 callback_thread.daemon = True
173 callback_thread.daemon = True
175 callback_thread.start()
174 callback_thread.start()
176 self._callback_thread = callback_thread
175 self._callback_thread = callback_thread
177
176
178 def _stop(self):
177 def _stop(self):
179 log.debug("Waiting for background thread to finish.")
178 log.debug("Waiting for background thread to finish.")
180 self._daemon.shutdown()
179 self._daemon.shutdown()
181 self._callback_thread.join()
180 self._callback_thread.join()
182 self._daemon = None
181 self._daemon = None
183 self._callback_thread = None
182 self._callback_thread = None
184 if self.txn_id:
183 if self.txn_id:
185 txn_id_file = get_txn_id_data_path(self.txn_id)
184 txn_id_file = get_txn_id_data_path(self.txn_id)
186 log.debug('Cleaning up TXN ID %s', txn_id_file)
185 log.debug('Cleaning up TXN ID %s', txn_id_file)
187 if os.path.isfile(txn_id_file):
186 if os.path.isfile(txn_id_file):
188 os.remove(txn_id_file)
187 os.remove(txn_id_file)
189
188
190 log.debug("Background thread done.")
189 log.debug("Background thread done.")
191
190
192
191
193 def get_txn_id_data_path(txn_id):
192 def get_txn_id_data_path(txn_id):
194 root = tempfile.gettempdir()
193 root = tempfile.gettempdir()
195 return os.path.join(root, 'rc_txn_id_{}'.format(txn_id))
194 return os.path.join(root, 'rc_txn_id_{}'.format(txn_id))
196
195
197
196
198 def store_txn_id_data(txn_id, data_dict):
197 def store_txn_id_data(txn_id, data_dict):
199 if not txn_id:
198 if not txn_id:
200 log.warning('Cannot store txn_id because it is empty')
199 log.warning('Cannot store txn_id because it is empty')
201 return
200 return
202
201
203 path = get_txn_id_data_path(txn_id)
202 path = get_txn_id_data_path(txn_id)
204 try:
203 try:
205 with open(path, 'wb') as f:
204 with open(path, 'wb') as f:
206 f.write(json.dumps(data_dict))
205 f.write(json.dumps(data_dict))
207 except Exception:
206 except Exception:
208 log.exception('Failed to write txn_id metadata')
207 log.exception('Failed to write txn_id metadata')
209
208
210
209
211 def get_txn_id_from_store(txn_id):
210 def get_txn_id_from_store(txn_id):
212 """
211 """
213 Reads txn_id from store and if present returns the data for callback manager
212 Reads txn_id from store and if present returns the data for callback manager
214 """
213 """
215 path = get_txn_id_data_path(txn_id)
214 path = get_txn_id_data_path(txn_id)
216 try:
215 try:
217 with open(path, 'rb') as f:
216 with open(path, 'rb') as f:
218 return json.loads(f.read())
217 return json.loads(f.read())
219 except Exception:
218 except Exception:
220 return {}
219 return {}
221
220
222
221
223 def prepare_callback_daemon(extras, protocol, use_direct_calls, txn_id=None):
222 def prepare_callback_daemon(extras, protocol, host, use_direct_calls, txn_id=None):
224 txn_details = get_txn_id_from_store(txn_id)
223 txn_details = get_txn_id_from_store(txn_id)
225 port = txn_details.get('port', 0)
224 port = txn_details.get('port', 0)
226 if use_direct_calls:
225 if use_direct_calls:
227 callback_daemon = DummyHooksCallbackDaemon()
226 callback_daemon = DummyHooksCallbackDaemon()
228 extras['hooks_module'] = callback_daemon.hooks_module
227 extras['hooks_module'] = callback_daemon.hooks_module
229 else:
228 else:
230 if protocol == 'http':
229 if protocol == 'http':
231 callback_daemon = HttpHooksCallbackDaemon(txn_id=txn_id, port=port)
230 callback_daemon = HttpHooksCallbackDaemon(
231 txn_id=txn_id, host=host, port=port)
232 else:
232 else:
233 log.error('Unsupported callback daemon protocol "%s"', protocol)
233 log.error('Unsupported callback daemon protocol "%s"', protocol)
234 raise Exception('Unsupported callback daemon protocol.')
234 raise Exception('Unsupported callback daemon protocol.')
235
235
236 extras['hooks_uri'] = callback_daemon.hooks_uri
236 extras['hooks_uri'] = callback_daemon.hooks_uri
237 extras['hooks_protocol'] = protocol
237 extras['hooks_protocol'] = protocol
238 extras['time'] = time.time()
238 extras['time'] = time.time()
239
239
240 # register txn_id
240 # register txn_id
241 extras['txn_id'] = txn_id
241 extras['txn_id'] = txn_id
242
242
243 log.debug('Prepared a callback daemon: %s at url `%s`',
243 log.debug('Prepared a callback daemon: %s at url `%s`',
244 callback_daemon.__class__.__name__, callback_daemon.hooks_uri)
244 callback_daemon.__class__.__name__, callback_daemon.hooks_uri)
245 return callback_daemon, extras
245 return callback_daemon, extras
246
246
247
247
248 class Hooks(object):
248 class Hooks(object):
249 """
249 """
250 Exposes the hooks for remote call backs
250 Exposes the hooks for remote call backs
251 """
251 """
252
252
253 def repo_size(self, extras):
253 def repo_size(self, extras):
254 log.debug("Called repo_size of %s object", self)
254 log.debug("Called repo_size of %s object", self)
255 return self._call_hook(hooks_base.repo_size, extras)
255 return self._call_hook(hooks_base.repo_size, extras)
256
256
257 def pre_pull(self, extras):
257 def pre_pull(self, extras):
258 log.debug("Called pre_pull of %s object", self)
258 log.debug("Called pre_pull of %s object", self)
259 return self._call_hook(hooks_base.pre_pull, extras)
259 return self._call_hook(hooks_base.pre_pull, extras)
260
260
261 def post_pull(self, extras):
261 def post_pull(self, extras):
262 log.debug("Called post_pull of %s object", self)
262 log.debug("Called post_pull of %s object", self)
263 return self._call_hook(hooks_base.post_pull, extras)
263 return self._call_hook(hooks_base.post_pull, extras)
264
264
265 def pre_push(self, extras):
265 def pre_push(self, extras):
266 log.debug("Called pre_push of %s object", self)
266 log.debug("Called pre_push of %s object", self)
267 return self._call_hook(hooks_base.pre_push, extras)
267 return self._call_hook(hooks_base.pre_push, extras)
268
268
269 def post_push(self, extras):
269 def post_push(self, extras):
270 log.debug("Called post_push of %s object", self)
270 log.debug("Called post_push of %s object", self)
271 return self._call_hook(hooks_base.post_push, extras)
271 return self._call_hook(hooks_base.post_push, extras)
272
272
273 def _call_hook(self, hook, extras):
273 def _call_hook(self, hook, extras):
274 extras = AttributeDict(extras)
274 extras = AttributeDict(extras)
275 server_url = extras['server_url']
275 server_url = extras['server_url']
276 request = bootstrap_request(application_url=server_url)
276 request = bootstrap_request(application_url=server_url)
277
277
278 bootstrap_config(request) # inject routes and other interfaces
278 bootstrap_config(request) # inject routes and other interfaces
279
279
280 # inject the user for usage in hooks
280 # inject the user for usage in hooks
281 request.user = AttributeDict({'username': extras.username,
281 request.user = AttributeDict({'username': extras.username,
282 'ip_addr': extras.ip,
282 'ip_addr': extras.ip,
283 'user_id': extras.user_id})
283 'user_id': extras.user_id})
284
284
285 extras.request = request
285 extras.request = request
286
286
287 try:
287 try:
288 result = hook(extras)
288 result = hook(extras)
289 except Exception as error:
289 except Exception as error:
290 exc_tb = traceback.format_exc()
290 exc_tb = traceback.format_exc()
291 log.exception('Exception when handling hook %s', hook)
291 log.exception('Exception when handling hook %s', hook)
292 error_args = error.args
292 error_args = error.args
293 return {
293 return {
294 'status': 128,
294 'status': 128,
295 'output': '',
295 'output': '',
296 'exception': type(error).__name__,
296 'exception': type(error).__name__,
297 'exception_traceback': exc_tb,
297 'exception_traceback': exc_tb,
298 'exception_args': error_args,
298 'exception_args': error_args,
299 }
299 }
300 finally:
300 finally:
301 meta.Session.remove()
301 meta.Session.remove()
302
302
303 log.debug('Got hook call response %s', result)
303 log.debug('Got hook call response %s', result)
304 return {
304 return {
305 'status': result.status,
305 'status': result.status,
306 'output': result.output,
306 'output': result.output,
307 }
307 }
308
308
309 def __enter__(self):
309 def __enter__(self):
310 return self
310 return self
311
311
312 def __exit__(self, exc_type, exc_val, exc_tb):
312 def __exit__(self, exc_type, exc_val, exc_tb):
313 pass
313 pass
@@ -1,674 +1,674 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 It's implemented with basic auth function
23 It's implemented with basic auth function
24 """
24 """
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import importlib
29 import importlib
30 from functools import wraps
30 from functools import wraps
31 from StringIO import StringIO
31 from StringIO import StringIO
32 from lxml import etree
32 from lxml import etree
33
33
34 import time
34 import time
35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36
36
37 from pyramid.httpexceptions import (
37 from pyramid.httpexceptions import (
38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 from zope.cachedescriptors.property import Lazy as LazyProperty
39 from zope.cachedescriptors.property import Lazy as LazyProperty
40
40
41 import rhodecode
41 import rhodecode
42 from rhodecode.authentication.base import (
42 from rhodecode.authentication.base import (
43 authenticate, get_perms_cache_manager, VCS_TYPE, loadplugin)
43 authenticate, get_perms_cache_manager, VCS_TYPE, loadplugin)
44 from rhodecode.lib import caches
44 from rhodecode.lib import caches
45 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
46 from rhodecode.lib.base import (
46 from rhodecode.lib.base import (
47 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
47 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
48 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
48 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
49 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
49 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
50 from rhodecode.lib.middleware import appenlight
50 from rhodecode.lib.middleware import appenlight
51 from rhodecode.lib.middleware.utils import scm_app_http
51 from rhodecode.lib.middleware.utils import scm_app_http
52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
54 from rhodecode.lib.vcs.conf import settings as vcs_settings
54 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 from rhodecode.lib.vcs.backends import base
55 from rhodecode.lib.vcs.backends import base
56
56
57 from rhodecode.model import meta
57 from rhodecode.model import meta
58 from rhodecode.model.db import User, Repository, PullRequest
58 from rhodecode.model.db import User, Repository, PullRequest
59 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.scm import ScmModel
60 from rhodecode.model.pull_request import PullRequestModel
60 from rhodecode.model.pull_request import PullRequestModel
61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
62
62
63 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
64
64
65
65
66 def extract_svn_txn_id(acl_repo_name, data):
66 def extract_svn_txn_id(acl_repo_name, data):
67 """
67 """
68 Helper method for extraction of svn txn_id from submited XML data during
68 Helper method for extraction of svn txn_id from submited XML data during
69 POST operations
69 POST operations
70 """
70 """
71 try:
71 try:
72 root = etree.fromstring(data)
72 root = etree.fromstring(data)
73 pat = re.compile(r'/txn/(?P<txn_id>.*)')
73 pat = re.compile(r'/txn/(?P<txn_id>.*)')
74 for el in root:
74 for el in root:
75 if el.tag == '{DAV:}source':
75 if el.tag == '{DAV:}source':
76 for sub_el in el:
76 for sub_el in el:
77 if sub_el.tag == '{DAV:}href':
77 if sub_el.tag == '{DAV:}href':
78 match = pat.search(sub_el.text)
78 match = pat.search(sub_el.text)
79 if match:
79 if match:
80 svn_tx_id = match.groupdict()['txn_id']
80 svn_tx_id = match.groupdict()['txn_id']
81 txn_id = caches.compute_key_from_params(
81 txn_id = caches.compute_key_from_params(
82 acl_repo_name, svn_tx_id)
82 acl_repo_name, svn_tx_id)
83 return txn_id
83 return txn_id
84 except Exception:
84 except Exception:
85 log.exception('Failed to extract txn_id')
85 log.exception('Failed to extract txn_id')
86
86
87
87
88 def initialize_generator(factory):
88 def initialize_generator(factory):
89 """
89 """
90 Initializes the returned generator by draining its first element.
90 Initializes the returned generator by draining its first element.
91
91
92 This can be used to give a generator an initializer, which is the code
92 This can be used to give a generator an initializer, which is the code
93 up to the first yield statement. This decorator enforces that the first
93 up to the first yield statement. This decorator enforces that the first
94 produced element has the value ``"__init__"`` to make its special
94 produced element has the value ``"__init__"`` to make its special
95 purpose very explicit in the using code.
95 purpose very explicit in the using code.
96 """
96 """
97
97
98 @wraps(factory)
98 @wraps(factory)
99 def wrapper(*args, **kwargs):
99 def wrapper(*args, **kwargs):
100 gen = factory(*args, **kwargs)
100 gen = factory(*args, **kwargs)
101 try:
101 try:
102 init = gen.next()
102 init = gen.next()
103 except StopIteration:
103 except StopIteration:
104 raise ValueError('Generator must yield at least one element.')
104 raise ValueError('Generator must yield at least one element.')
105 if init != "__init__":
105 if init != "__init__":
106 raise ValueError('First yielded element must be "__init__".')
106 raise ValueError('First yielded element must be "__init__".')
107 return gen
107 return gen
108 return wrapper
108 return wrapper
109
109
110
110
111 class SimpleVCS(object):
111 class SimpleVCS(object):
112 """Common functionality for SCM HTTP handlers."""
112 """Common functionality for SCM HTTP handlers."""
113
113
114 SCM = 'unknown'
114 SCM = 'unknown'
115
115
116 acl_repo_name = None
116 acl_repo_name = None
117 url_repo_name = None
117 url_repo_name = None
118 vcs_repo_name = None
118 vcs_repo_name = None
119 rc_extras = {}
119 rc_extras = {}
120
120
121 # We have to handle requests to shadow repositories different than requests
121 # We have to handle requests to shadow repositories different than requests
122 # to normal repositories. Therefore we have to distinguish them. To do this
122 # to normal repositories. Therefore we have to distinguish them. To do this
123 # we use this regex which will match only on URLs pointing to shadow
123 # we use this regex which will match only on URLs pointing to shadow
124 # repositories.
124 # repositories.
125 shadow_repo_re = re.compile(
125 shadow_repo_re = re.compile(
126 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
126 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
127 '(?P<target>{slug_pat})/' # target repo
127 '(?P<target>{slug_pat})/' # target repo
128 'pull-request/(?P<pr_id>\d+)/' # pull request
128 'pull-request/(?P<pr_id>\d+)/' # pull request
129 'repository$' # shadow repo
129 'repository$' # shadow repo
130 .format(slug_pat=SLUG_RE.pattern))
130 .format(slug_pat=SLUG_RE.pattern))
131
131
132 def __init__(self, config, registry):
132 def __init__(self, config, registry):
133 self.registry = registry
133 self.registry = registry
134 self.config = config
134 self.config = config
135 # re-populated by specialized middleware
135 # re-populated by specialized middleware
136 self.repo_vcs_config = base.Config()
136 self.repo_vcs_config = base.Config()
137 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
137 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
138
138
139 registry.rhodecode_settings = self.rhodecode_settings
139 registry.rhodecode_settings = self.rhodecode_settings
140 # authenticate this VCS request using authfunc
140 # authenticate this VCS request using authfunc
141 auth_ret_code_detection = \
141 auth_ret_code_detection = \
142 str2bool(self.config.get('auth_ret_code_detection', False))
142 str2bool(self.config.get('auth_ret_code_detection', False))
143 self.authenticate = BasicAuth(
143 self.authenticate = BasicAuth(
144 '', authenticate, registry, config.get('auth_ret_code'),
144 '', authenticate, registry, config.get('auth_ret_code'),
145 auth_ret_code_detection)
145 auth_ret_code_detection)
146 self.ip_addr = '0.0.0.0'
146 self.ip_addr = '0.0.0.0'
147
147
148 @LazyProperty
148 @LazyProperty
149 def global_vcs_config(self):
149 def global_vcs_config(self):
150 try:
150 try:
151 return VcsSettingsModel().get_ui_settings_as_config_obj()
151 return VcsSettingsModel().get_ui_settings_as_config_obj()
152 except Exception:
152 except Exception:
153 return base.Config()
153 return base.Config()
154
154
155 @property
155 @property
156 def base_path(self):
156 def base_path(self):
157 settings_path = self.repo_vcs_config.get(
157 settings_path = self.repo_vcs_config.get(
158 *VcsSettingsModel.PATH_SETTING)
158 *VcsSettingsModel.PATH_SETTING)
159
159
160 if not settings_path:
160 if not settings_path:
161 settings_path = self.global_vcs_config.get(
161 settings_path = self.global_vcs_config.get(
162 *VcsSettingsModel.PATH_SETTING)
162 *VcsSettingsModel.PATH_SETTING)
163
163
164 if not settings_path:
164 if not settings_path:
165 # try, maybe we passed in explicitly as config option
165 # try, maybe we passed in explicitly as config option
166 settings_path = self.config.get('base_path')
166 settings_path = self.config.get('base_path')
167
167
168 if not settings_path:
168 if not settings_path:
169 raise ValueError('FATAL: base_path is empty')
169 raise ValueError('FATAL: base_path is empty')
170 return settings_path
170 return settings_path
171
171
172 def set_repo_names(self, environ):
172 def set_repo_names(self, environ):
173 """
173 """
174 This will populate the attributes acl_repo_name, url_repo_name,
174 This will populate the attributes acl_repo_name, url_repo_name,
175 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
175 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
176 shadow) repositories all names are equal. In case of requests to a
176 shadow) repositories all names are equal. In case of requests to a
177 shadow repository the acl-name points to the target repo of the pull
177 shadow repository the acl-name points to the target repo of the pull
178 request and the vcs-name points to the shadow repo file system path.
178 request and the vcs-name points to the shadow repo file system path.
179 The url-name is always the URL used by the vcs client program.
179 The url-name is always the URL used by the vcs client program.
180
180
181 Example in case of a shadow repo:
181 Example in case of a shadow repo:
182 acl_repo_name = RepoGroup/MyRepo
182 acl_repo_name = RepoGroup/MyRepo
183 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
183 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
184 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
184 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
185 """
185 """
186 # First we set the repo name from URL for all attributes. This is the
186 # First we set the repo name from URL for all attributes. This is the
187 # default if handling normal (non shadow) repo requests.
187 # default if handling normal (non shadow) repo requests.
188 self.url_repo_name = self._get_repository_name(environ)
188 self.url_repo_name = self._get_repository_name(environ)
189 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
189 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
190 self.is_shadow_repo = False
190 self.is_shadow_repo = False
191
191
192 # Check if this is a request to a shadow repository.
192 # Check if this is a request to a shadow repository.
193 match = self.shadow_repo_re.match(self.url_repo_name)
193 match = self.shadow_repo_re.match(self.url_repo_name)
194 if match:
194 if match:
195 match_dict = match.groupdict()
195 match_dict = match.groupdict()
196
196
197 # Build acl repo name from regex match.
197 # Build acl repo name from regex match.
198 acl_repo_name = safe_unicode('{groups}{target}'.format(
198 acl_repo_name = safe_unicode('{groups}{target}'.format(
199 groups=match_dict['groups'] or '',
199 groups=match_dict['groups'] or '',
200 target=match_dict['target']))
200 target=match_dict['target']))
201
201
202 # Retrieve pull request instance by ID from regex match.
202 # Retrieve pull request instance by ID from regex match.
203 pull_request = PullRequest.get(match_dict['pr_id'])
203 pull_request = PullRequest.get(match_dict['pr_id'])
204
204
205 # Only proceed if we got a pull request and if acl repo name from
205 # Only proceed if we got a pull request and if acl repo name from
206 # URL equals the target repo name of the pull request.
206 # URL equals the target repo name of the pull request.
207 if pull_request and \
207 if pull_request and \
208 (acl_repo_name == pull_request.target_repo.repo_name):
208 (acl_repo_name == pull_request.target_repo.repo_name):
209 repo_id = pull_request.target_repo.repo_id
209 repo_id = pull_request.target_repo.repo_id
210 # Get file system path to shadow repository.
210 # Get file system path to shadow repository.
211 workspace_id = PullRequestModel()._workspace_id(pull_request)
211 workspace_id = PullRequestModel()._workspace_id(pull_request)
212 target_vcs = pull_request.target_repo.scm_instance()
212 target_vcs = pull_request.target_repo.scm_instance()
213 vcs_repo_name = target_vcs._get_shadow_repository_path(
213 vcs_repo_name = target_vcs._get_shadow_repository_path(
214 repo_id, workspace_id)
214 repo_id, workspace_id)
215
215
216 # Store names for later usage.
216 # Store names for later usage.
217 self.vcs_repo_name = vcs_repo_name
217 self.vcs_repo_name = vcs_repo_name
218 self.acl_repo_name = acl_repo_name
218 self.acl_repo_name = acl_repo_name
219 self.is_shadow_repo = True
219 self.is_shadow_repo = True
220
220
221 log.debug('Setting all VCS repository names: %s', {
221 log.debug('Setting all VCS repository names: %s', {
222 'acl_repo_name': self.acl_repo_name,
222 'acl_repo_name': self.acl_repo_name,
223 'url_repo_name': self.url_repo_name,
223 'url_repo_name': self.url_repo_name,
224 'vcs_repo_name': self.vcs_repo_name,
224 'vcs_repo_name': self.vcs_repo_name,
225 })
225 })
226
226
227 @property
227 @property
228 def scm_app(self):
228 def scm_app(self):
229 custom_implementation = self.config['vcs.scm_app_implementation']
229 custom_implementation = self.config['vcs.scm_app_implementation']
230 if custom_implementation == 'http':
230 if custom_implementation == 'http':
231 log.info('Using HTTP implementation of scm app.')
231 log.info('Using HTTP implementation of scm app.')
232 scm_app_impl = scm_app_http
232 scm_app_impl = scm_app_http
233 else:
233 else:
234 log.info('Using custom implementation of scm_app: "{}"'.format(
234 log.info('Using custom implementation of scm_app: "{}"'.format(
235 custom_implementation))
235 custom_implementation))
236 scm_app_impl = importlib.import_module(custom_implementation)
236 scm_app_impl = importlib.import_module(custom_implementation)
237 return scm_app_impl
237 return scm_app_impl
238
238
239 def _get_by_id(self, repo_name):
239 def _get_by_id(self, repo_name):
240 """
240 """
241 Gets a special pattern _<ID> from clone url and tries to replace it
241 Gets a special pattern _<ID> from clone url and tries to replace it
242 with a repository_name for support of _<ID> non changeable urls
242 with a repository_name for support of _<ID> non changeable urls
243 """
243 """
244
244
245 data = repo_name.split('/')
245 data = repo_name.split('/')
246 if len(data) >= 2:
246 if len(data) >= 2:
247 from rhodecode.model.repo import RepoModel
247 from rhodecode.model.repo import RepoModel
248 by_id_match = RepoModel().get_repo_by_id(repo_name)
248 by_id_match = RepoModel().get_repo_by_id(repo_name)
249 if by_id_match:
249 if by_id_match:
250 data[1] = by_id_match.repo_name
250 data[1] = by_id_match.repo_name
251
251
252 return safe_str('/'.join(data))
252 return safe_str('/'.join(data))
253
253
254 def _invalidate_cache(self, repo_name):
254 def _invalidate_cache(self, repo_name):
255 """
255 """
256 Set's cache for this repository for invalidation on next access
256 Set's cache for this repository for invalidation on next access
257
257
258 :param repo_name: full repo name, also a cache key
258 :param repo_name: full repo name, also a cache key
259 """
259 """
260 ScmModel().mark_for_invalidation(repo_name)
260 ScmModel().mark_for_invalidation(repo_name)
261
261
262 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
262 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
263 db_repo = Repository.get_by_repo_name(repo_name)
263 db_repo = Repository.get_by_repo_name(repo_name)
264 if not db_repo:
264 if not db_repo:
265 log.debug('Repository `%s` not found inside the database.',
265 log.debug('Repository `%s` not found inside the database.',
266 repo_name)
266 repo_name)
267 return False
267 return False
268
268
269 if db_repo.repo_type != scm_type:
269 if db_repo.repo_type != scm_type:
270 log.warning(
270 log.warning(
271 'Repository `%s` have incorrect scm_type, expected %s got %s',
271 'Repository `%s` have incorrect scm_type, expected %s got %s',
272 repo_name, db_repo.repo_type, scm_type)
272 repo_name, db_repo.repo_type, scm_type)
273 return False
273 return False
274
274
275 config = db_repo._config
275 config = db_repo._config
276 config.set('extensions', 'largefiles', '')
276 config.set('extensions', 'largefiles', '')
277 return is_valid_repo(
277 return is_valid_repo(
278 repo_name, base_path,
278 repo_name, base_path,
279 explicit_scm=scm_type, expect_scm=scm_type, config=config)
279 explicit_scm=scm_type, expect_scm=scm_type, config=config)
280
280
281 def valid_and_active_user(self, user):
281 def valid_and_active_user(self, user):
282 """
282 """
283 Checks if that user is not empty, and if it's actually object it checks
283 Checks if that user is not empty, and if it's actually object it checks
284 if he's active.
284 if he's active.
285
285
286 :param user: user object or None
286 :param user: user object or None
287 :return: boolean
287 :return: boolean
288 """
288 """
289 if user is None:
289 if user is None:
290 return False
290 return False
291
291
292 elif user.active:
292 elif user.active:
293 return True
293 return True
294
294
295 return False
295 return False
296
296
297 @property
297 @property
298 def is_shadow_repo_dir(self):
298 def is_shadow_repo_dir(self):
299 return os.path.isdir(self.vcs_repo_name)
299 return os.path.isdir(self.vcs_repo_name)
300
300
301 def _check_permission(self, action, user, repo_name, ip_addr=None,
301 def _check_permission(self, action, user, repo_name, ip_addr=None,
302 plugin_id='', plugin_cache_active=False, cache_ttl=0):
302 plugin_id='', plugin_cache_active=False, cache_ttl=0):
303 """
303 """
304 Checks permissions using action (push/pull) user and repository
304 Checks permissions using action (push/pull) user and repository
305 name. If plugin_cache and ttl is set it will use the plugin which
305 name. If plugin_cache and ttl is set it will use the plugin which
306 authenticated the user to store the cached permissions result for N
306 authenticated the user to store the cached permissions result for N
307 amount of seconds as in cache_ttl
307 amount of seconds as in cache_ttl
308
308
309 :param action: push or pull action
309 :param action: push or pull action
310 :param user: user instance
310 :param user: user instance
311 :param repo_name: repository name
311 :param repo_name: repository name
312 """
312 """
313
313
314 # get instance of cache manager configured for a namespace
314 # get instance of cache manager configured for a namespace
315 cache_manager = get_perms_cache_manager(
315 cache_manager = get_perms_cache_manager(
316 custom_ttl=cache_ttl, suffix=user.user_id)
316 custom_ttl=cache_ttl, suffix=user.user_id)
317 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
317 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
318 plugin_id, plugin_cache_active, cache_ttl)
318 plugin_id, plugin_cache_active, cache_ttl)
319
319
320 # for environ based password can be empty, but then the validation is
320 # for environ based password can be empty, but then the validation is
321 # on the server that fills in the env data needed for authentication
321 # on the server that fills in the env data needed for authentication
322 _perm_calc_hash = caches.compute_key_from_params(
322 _perm_calc_hash = caches.compute_key_from_params(
323 plugin_id, action, user.user_id, repo_name, ip_addr)
323 plugin_id, action, user.user_id, repo_name, ip_addr)
324
324
325 # _authenticate is a wrapper for .auth() method of plugin.
325 # _authenticate is a wrapper for .auth() method of plugin.
326 # it checks if .auth() sends proper data.
326 # it checks if .auth() sends proper data.
327 # For RhodeCodeExternalAuthPlugin it also maps users to
327 # For RhodeCodeExternalAuthPlugin it also maps users to
328 # Database and maps the attributes returned from .auth()
328 # Database and maps the attributes returned from .auth()
329 # to RhodeCode database. If this function returns data
329 # to RhodeCode database. If this function returns data
330 # then auth is correct.
330 # then auth is correct.
331 start = time.time()
331 start = time.time()
332 log.debug('Running plugin `%s` permissions check', plugin_id)
332 log.debug('Running plugin `%s` permissions check', plugin_id)
333
333
334 def perm_func():
334 def perm_func():
335 """
335 """
336 This function is used internally in Cache of Beaker to calculate
336 This function is used internally in Cache of Beaker to calculate
337 Results
337 Results
338 """
338 """
339 log.debug('auth: calculating permission access now...')
339 log.debug('auth: calculating permission access now...')
340 # check IP
340 # check IP
341 inherit = user.inherit_default_permissions
341 inherit = user.inherit_default_permissions
342 ip_allowed = AuthUser.check_ip_allowed(
342 ip_allowed = AuthUser.check_ip_allowed(
343 user.user_id, ip_addr, inherit_from_default=inherit)
343 user.user_id, ip_addr, inherit_from_default=inherit)
344 if ip_allowed:
344 if ip_allowed:
345 log.info('Access for IP:%s allowed', ip_addr)
345 log.info('Access for IP:%s allowed', ip_addr)
346 else:
346 else:
347 return False
347 return False
348
348
349 if action == 'push':
349 if action == 'push':
350 perms = ('repository.write', 'repository.admin')
350 perms = ('repository.write', 'repository.admin')
351 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
351 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
352 return False
352 return False
353
353
354 else:
354 else:
355 # any other action need at least read permission
355 # any other action need at least read permission
356 perms = (
356 perms = (
357 'repository.read', 'repository.write', 'repository.admin')
357 'repository.read', 'repository.write', 'repository.admin')
358 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
358 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
359 return False
359 return False
360
360
361 return True
361 return True
362
362
363 if plugin_cache_active:
363 if plugin_cache_active:
364 log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
364 log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
365 perm_result = cache_manager.get(
365 perm_result = cache_manager.get(
366 _perm_calc_hash, createfunc=perm_func)
366 _perm_calc_hash, createfunc=perm_func)
367 else:
367 else:
368 perm_result = perm_func()
368 perm_result = perm_func()
369
369
370 auth_time = time.time() - start
370 auth_time = time.time() - start
371 log.debug('Permissions for plugin `%s` completed in %.3fs, '
371 log.debug('Permissions for plugin `%s` completed in %.3fs, '
372 'expiration time of fetched cache %.1fs.',
372 'expiration time of fetched cache %.1fs.',
373 plugin_id, auth_time, cache_ttl)
373 plugin_id, auth_time, cache_ttl)
374
374
375 return perm_result
375 return perm_result
376
376
377 def _check_ssl(self, environ, start_response):
377 def _check_ssl(self, environ, start_response):
378 """
378 """
379 Checks the SSL check flag and returns False if SSL is not present
379 Checks the SSL check flag and returns False if SSL is not present
380 and required True otherwise
380 and required True otherwise
381 """
381 """
382 org_proto = environ['wsgi._org_proto']
382 org_proto = environ['wsgi._org_proto']
383 # check if we have SSL required ! if not it's a bad request !
383 # check if we have SSL required ! if not it's a bad request !
384 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
384 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
385 if require_ssl and org_proto == 'http':
385 if require_ssl and org_proto == 'http':
386 log.debug(
386 log.debug(
387 'Bad request: detected protocol is `%s` and '
387 'Bad request: detected protocol is `%s` and '
388 'SSL/HTTPS is required.', org_proto)
388 'SSL/HTTPS is required.', org_proto)
389 return False
389 return False
390 return True
390 return True
391
391
392 def _get_default_cache_ttl(self):
392 def _get_default_cache_ttl(self):
393 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
393 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
394 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
394 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
395 plugin_settings = plugin.get_settings()
395 plugin_settings = plugin.get_settings()
396 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
396 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
397 plugin_settings) or (False, 0)
397 plugin_settings) or (False, 0)
398 return plugin_cache_active, cache_ttl
398 return plugin_cache_active, cache_ttl
399
399
400 def __call__(self, environ, start_response):
400 def __call__(self, environ, start_response):
401 try:
401 try:
402 return self._handle_request(environ, start_response)
402 return self._handle_request(environ, start_response)
403 except Exception:
403 except Exception:
404 log.exception("Exception while handling request")
404 log.exception("Exception while handling request")
405 appenlight.track_exception(environ)
405 appenlight.track_exception(environ)
406 return HTTPInternalServerError()(environ, start_response)
406 return HTTPInternalServerError()(environ, start_response)
407 finally:
407 finally:
408 meta.Session.remove()
408 meta.Session.remove()
409
409
410 def _handle_request(self, environ, start_response):
410 def _handle_request(self, environ, start_response):
411
411
412 if not self._check_ssl(environ, start_response):
412 if not self._check_ssl(environ, start_response):
413 reason = ('SSL required, while RhodeCode was unable '
413 reason = ('SSL required, while RhodeCode was unable '
414 'to detect this as SSL request')
414 'to detect this as SSL request')
415 log.debug('User not allowed to proceed, %s', reason)
415 log.debug('User not allowed to proceed, %s', reason)
416 return HTTPNotAcceptable(reason)(environ, start_response)
416 return HTTPNotAcceptable(reason)(environ, start_response)
417
417
418 if not self.url_repo_name:
418 if not self.url_repo_name:
419 log.warning('Repository name is empty: %s', self.url_repo_name)
419 log.warning('Repository name is empty: %s', self.url_repo_name)
420 # failed to get repo name, we fail now
420 # failed to get repo name, we fail now
421 return HTTPNotFound()(environ, start_response)
421 return HTTPNotFound()(environ, start_response)
422 log.debug('Extracted repo name is %s', self.url_repo_name)
422 log.debug('Extracted repo name is %s', self.url_repo_name)
423
423
424 ip_addr = get_ip_addr(environ)
424 ip_addr = get_ip_addr(environ)
425 user_agent = get_user_agent(environ)
425 user_agent = get_user_agent(environ)
426 username = None
426 username = None
427
427
428 # skip passing error to error controller
428 # skip passing error to error controller
429 environ['pylons.status_code_redirect'] = True
429 environ['pylons.status_code_redirect'] = True
430
430
431 # ======================================================================
431 # ======================================================================
432 # GET ACTION PULL or PUSH
432 # GET ACTION PULL or PUSH
433 # ======================================================================
433 # ======================================================================
434 action = self._get_action(environ)
434 action = self._get_action(environ)
435
435
436 # ======================================================================
436 # ======================================================================
437 # Check if this is a request to a shadow repository of a pull request.
437 # Check if this is a request to a shadow repository of a pull request.
438 # In this case only pull action is allowed.
438 # In this case only pull action is allowed.
439 # ======================================================================
439 # ======================================================================
440 if self.is_shadow_repo and action != 'pull':
440 if self.is_shadow_repo and action != 'pull':
441 reason = 'Only pull action is allowed for shadow repositories.'
441 reason = 'Only pull action is allowed for shadow repositories.'
442 log.debug('User not allowed to proceed, %s', reason)
442 log.debug('User not allowed to proceed, %s', reason)
443 return HTTPNotAcceptable(reason)(environ, start_response)
443 return HTTPNotAcceptable(reason)(environ, start_response)
444
444
445 # Check if the shadow repo actually exists, in case someone refers
445 # Check if the shadow repo actually exists, in case someone refers
446 # to it, and it has been deleted because of successful merge.
446 # to it, and it has been deleted because of successful merge.
447 if self.is_shadow_repo and not self.is_shadow_repo_dir:
447 if self.is_shadow_repo and not self.is_shadow_repo_dir:
448 log.debug(
448 log.debug(
449 'Shadow repo detected, and shadow repo dir `%s` is missing',
449 'Shadow repo detected, and shadow repo dir `%s` is missing',
450 self.is_shadow_repo_dir)
450 self.is_shadow_repo_dir)
451 return HTTPNotFound()(environ, start_response)
451 return HTTPNotFound()(environ, start_response)
452
452
453 # ======================================================================
453 # ======================================================================
454 # CHECK ANONYMOUS PERMISSION
454 # CHECK ANONYMOUS PERMISSION
455 # ======================================================================
455 # ======================================================================
456 if action in ['pull', 'push']:
456 if action in ['pull', 'push']:
457 anonymous_user = User.get_default_user()
457 anonymous_user = User.get_default_user()
458 username = anonymous_user.username
458 username = anonymous_user.username
459 if anonymous_user.active:
459 if anonymous_user.active:
460 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
460 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
461 # ONLY check permissions if the user is activated
461 # ONLY check permissions if the user is activated
462 anonymous_perm = self._check_permission(
462 anonymous_perm = self._check_permission(
463 action, anonymous_user, self.acl_repo_name, ip_addr,
463 action, anonymous_user, self.acl_repo_name, ip_addr,
464 plugin_id='anonymous_access',
464 plugin_id='anonymous_access',
465 plugin_cache_active=plugin_cache_active,
465 plugin_cache_active=plugin_cache_active,
466 cache_ttl=cache_ttl,
466 cache_ttl=cache_ttl,
467 )
467 )
468 else:
468 else:
469 anonymous_perm = False
469 anonymous_perm = False
470
470
471 if not anonymous_user.active or not anonymous_perm:
471 if not anonymous_user.active or not anonymous_perm:
472 if not anonymous_user.active:
472 if not anonymous_user.active:
473 log.debug('Anonymous access is disabled, running '
473 log.debug('Anonymous access is disabled, running '
474 'authentication')
474 'authentication')
475
475
476 if not anonymous_perm:
476 if not anonymous_perm:
477 log.debug('Not enough credentials to access this '
477 log.debug('Not enough credentials to access this '
478 'repository as anonymous user')
478 'repository as anonymous user')
479
479
480 username = None
480 username = None
481 # ==============================================================
481 # ==============================================================
482 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
482 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
483 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
483 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
484 # ==============================================================
484 # ==============================================================
485
485
486 # try to auth based on environ, container auth methods
486 # try to auth based on environ, container auth methods
487 log.debug('Running PRE-AUTH for container based authentication')
487 log.debug('Running PRE-AUTH for container based authentication')
488 pre_auth = authenticate(
488 pre_auth = authenticate(
489 '', '', environ, VCS_TYPE, registry=self.registry,
489 '', '', environ, VCS_TYPE, registry=self.registry,
490 acl_repo_name=self.acl_repo_name)
490 acl_repo_name=self.acl_repo_name)
491 if pre_auth and pre_auth.get('username'):
491 if pre_auth and pre_auth.get('username'):
492 username = pre_auth['username']
492 username = pre_auth['username']
493 log.debug('PRE-AUTH got %s as username', username)
493 log.debug('PRE-AUTH got %s as username', username)
494 if pre_auth:
494 if pre_auth:
495 log.debug('PRE-AUTH successful from %s',
495 log.debug('PRE-AUTH successful from %s',
496 pre_auth.get('auth_data', {}).get('_plugin'))
496 pre_auth.get('auth_data', {}).get('_plugin'))
497
497
498 # If not authenticated by the container, running basic auth
498 # If not authenticated by the container, running basic auth
499 # before inject the calling repo_name for special scope checks
499 # before inject the calling repo_name for special scope checks
500 self.authenticate.acl_repo_name = self.acl_repo_name
500 self.authenticate.acl_repo_name = self.acl_repo_name
501
501
502 plugin_cache_active, cache_ttl = False, 0
502 plugin_cache_active, cache_ttl = False, 0
503 plugin = None
503 plugin = None
504 if not username:
504 if not username:
505 self.authenticate.realm = self.authenticate.get_rc_realm()
505 self.authenticate.realm = self.authenticate.get_rc_realm()
506
506
507 try:
507 try:
508 auth_result = self.authenticate(environ)
508 auth_result = self.authenticate(environ)
509 except (UserCreationError, NotAllowedToCreateUserError) as e:
509 except (UserCreationError, NotAllowedToCreateUserError) as e:
510 log.error(e)
510 log.error(e)
511 reason = safe_str(e)
511 reason = safe_str(e)
512 return HTTPNotAcceptable(reason)(environ, start_response)
512 return HTTPNotAcceptable(reason)(environ, start_response)
513
513
514 if isinstance(auth_result, dict):
514 if isinstance(auth_result, dict):
515 AUTH_TYPE.update(environ, 'basic')
515 AUTH_TYPE.update(environ, 'basic')
516 REMOTE_USER.update(environ, auth_result['username'])
516 REMOTE_USER.update(environ, auth_result['username'])
517 username = auth_result['username']
517 username = auth_result['username']
518 plugin = auth_result.get('auth_data', {}).get('_plugin')
518 plugin = auth_result.get('auth_data', {}).get('_plugin')
519 log.info(
519 log.info(
520 'MAIN-AUTH successful for user `%s` from %s plugin',
520 'MAIN-AUTH successful for user `%s` from %s plugin',
521 username, plugin)
521 username, plugin)
522
522
523 plugin_cache_active, cache_ttl = auth_result.get(
523 plugin_cache_active, cache_ttl = auth_result.get(
524 'auth_data', {}).get('_ttl_cache') or (False, 0)
524 'auth_data', {}).get('_ttl_cache') or (False, 0)
525 else:
525 else:
526 return auth_result.wsgi_application(
526 return auth_result.wsgi_application(
527 environ, start_response)
527 environ, start_response)
528
528
529
529
530 # ==============================================================
530 # ==============================================================
531 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
531 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
532 # ==============================================================
532 # ==============================================================
533 user = User.get_by_username(username)
533 user = User.get_by_username(username)
534 if not self.valid_and_active_user(user):
534 if not self.valid_and_active_user(user):
535 return HTTPForbidden()(environ, start_response)
535 return HTTPForbidden()(environ, start_response)
536 username = user.username
536 username = user.username
537 user.update_lastactivity()
537 user.update_lastactivity()
538 meta.Session().commit()
538 meta.Session().commit()
539
539
540 # check user attributes for password change flag
540 # check user attributes for password change flag
541 user_obj = user
541 user_obj = user
542 if user_obj and user_obj.username != User.DEFAULT_USER and \
542 if user_obj and user_obj.username != User.DEFAULT_USER and \
543 user_obj.user_data.get('force_password_change'):
543 user_obj.user_data.get('force_password_change'):
544 reason = 'password change required'
544 reason = 'password change required'
545 log.debug('User not allowed to authenticate, %s', reason)
545 log.debug('User not allowed to authenticate, %s', reason)
546 return HTTPNotAcceptable(reason)(environ, start_response)
546 return HTTPNotAcceptable(reason)(environ, start_response)
547
547
548 # check permissions for this repository
548 # check permissions for this repository
549 perm = self._check_permission(
549 perm = self._check_permission(
550 action, user, self.acl_repo_name, ip_addr,
550 action, user, self.acl_repo_name, ip_addr,
551 plugin, plugin_cache_active, cache_ttl)
551 plugin, plugin_cache_active, cache_ttl)
552 if not perm:
552 if not perm:
553 return HTTPForbidden()(environ, start_response)
553 return HTTPForbidden()(environ, start_response)
554
554
555 # extras are injected into UI object and later available
555 # extras are injected into UI object and later available
556 # in hooks executed by RhodeCode
556 # in hooks executed by RhodeCode
557 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
557 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
558 extras = vcs_operation_context(
558 extras = vcs_operation_context(
559 environ, repo_name=self.acl_repo_name, username=username,
559 environ, repo_name=self.acl_repo_name, username=username,
560 action=action, scm=self.SCM, check_locking=check_locking,
560 action=action, scm=self.SCM, check_locking=check_locking,
561 is_shadow_repo=self.is_shadow_repo
561 is_shadow_repo=self.is_shadow_repo
562 )
562 )
563
563
564 # ======================================================================
564 # ======================================================================
565 # REQUEST HANDLING
565 # REQUEST HANDLING
566 # ======================================================================
566 # ======================================================================
567 repo_path = os.path.join(
567 repo_path = os.path.join(
568 safe_str(self.base_path), safe_str(self.vcs_repo_name))
568 safe_str(self.base_path), safe_str(self.vcs_repo_name))
569 log.debug('Repository path is %s', repo_path)
569 log.debug('Repository path is %s', repo_path)
570
570
571 fix_PATH()
571 fix_PATH()
572
572
573 log.info(
573 log.info(
574 '%s action on %s repo "%s" by "%s" from %s %s',
574 '%s action on %s repo "%s" by "%s" from %s %s',
575 action, self.SCM, safe_str(self.url_repo_name),
575 action, self.SCM, safe_str(self.url_repo_name),
576 safe_str(username), ip_addr, user_agent)
576 safe_str(username), ip_addr, user_agent)
577
577
578 return self._generate_vcs_response(
578 return self._generate_vcs_response(
579 environ, start_response, repo_path, extras, action)
579 environ, start_response, repo_path, extras, action)
580
580
581 @initialize_generator
581 @initialize_generator
582 def _generate_vcs_response(
582 def _generate_vcs_response(
583 self, environ, start_response, repo_path, extras, action):
583 self, environ, start_response, repo_path, extras, action):
584 """
584 """
585 Returns a generator for the response content.
585 Returns a generator for the response content.
586
586
587 This method is implemented as a generator, so that it can trigger
587 This method is implemented as a generator, so that it can trigger
588 the cache validation after all content sent back to the client. It
588 the cache validation after all content sent back to the client. It
589 also handles the locking exceptions which will be triggered when
589 also handles the locking exceptions which will be triggered when
590 the first chunk is produced by the underlying WSGI application.
590 the first chunk is produced by the underlying WSGI application.
591 """
591 """
592 txn_id = ''
592 txn_id = ''
593 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
593 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
594 # case for SVN, we want to re-use the callback daemon port
594 # case for SVN, we want to re-use the callback daemon port
595 # so we use the txn_id, for this we peek the body, and still save
595 # so we use the txn_id, for this we peek the body, and still save
596 # it as wsgi.input
596 # it as wsgi.input
597 data = environ['wsgi.input'].read()
597 data = environ['wsgi.input'].read()
598 environ['wsgi.input'] = StringIO(data)
598 environ['wsgi.input'] = StringIO(data)
599 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
599 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
600
600
601 callback_daemon, extras = self._prepare_callback_daemon(
601 callback_daemon, extras = self._prepare_callback_daemon(
602 extras, environ, action, txn_id=txn_id)
602 extras, environ, action, txn_id=txn_id)
603 log.debug('HOOKS extras is %s', extras)
603 log.debug('HOOKS extras is %s', extras)
604
604
605 config = self._create_config(extras, self.acl_repo_name)
605 config = self._create_config(extras, self.acl_repo_name)
606 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
606 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
607 with callback_daemon:
607 with callback_daemon:
608 app.rc_extras = extras
608 app.rc_extras = extras
609
609
610 try:
610 try:
611 response = app(environ, start_response)
611 response = app(environ, start_response)
612 finally:
612 finally:
613 # This statement works together with the decorator
613 # This statement works together with the decorator
614 # "initialize_generator" above. The decorator ensures that
614 # "initialize_generator" above. The decorator ensures that
615 # we hit the first yield statement before the generator is
615 # we hit the first yield statement before the generator is
616 # returned back to the WSGI server. This is needed to
616 # returned back to the WSGI server. This is needed to
617 # ensure that the call to "app" above triggers the
617 # ensure that the call to "app" above triggers the
618 # needed callback to "start_response" before the
618 # needed callback to "start_response" before the
619 # generator is actually used.
619 # generator is actually used.
620 yield "__init__"
620 yield "__init__"
621
621
622 # iter content
622 # iter content
623 for chunk in response:
623 for chunk in response:
624 yield chunk
624 yield chunk
625
625
626 try:
626 try:
627 # invalidate cache on push
627 # invalidate cache on push
628 if action == 'push':
628 if action == 'push':
629 self._invalidate_cache(self.url_repo_name)
629 self._invalidate_cache(self.url_repo_name)
630 finally:
630 finally:
631 meta.Session.remove()
631 meta.Session.remove()
632
632
633 def _get_repository_name(self, environ):
633 def _get_repository_name(self, environ):
634 """Get repository name out of the environmnent
634 """Get repository name out of the environmnent
635
635
636 :param environ: WSGI environment
636 :param environ: WSGI environment
637 """
637 """
638 raise NotImplementedError()
638 raise NotImplementedError()
639
639
640 def _get_action(self, environ):
640 def _get_action(self, environ):
641 """Map request commands into a pull or push command.
641 """Map request commands into a pull or push command.
642
642
643 :param environ: WSGI environment
643 :param environ: WSGI environment
644 """
644 """
645 raise NotImplementedError()
645 raise NotImplementedError()
646
646
647 def _create_wsgi_app(self, repo_path, repo_name, config):
647 def _create_wsgi_app(self, repo_path, repo_name, config):
648 """Return the WSGI app that will finally handle the request."""
648 """Return the WSGI app that will finally handle the request."""
649 raise NotImplementedError()
649 raise NotImplementedError()
650
650
651 def _create_config(self, extras, repo_name):
651 def _create_config(self, extras, repo_name):
652 """Create a safe config representation."""
652 """Create a safe config representation."""
653 raise NotImplementedError()
653 raise NotImplementedError()
654
654
655 def _should_use_callback_daemon(self, extras, environ, action):
655 def _should_use_callback_daemon(self, extras, environ, action):
656 return True
656 return True
657
657
658 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
658 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
659 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
659 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
660 if not self._should_use_callback_daemon(extras, environ, action):
660 if not self._should_use_callback_daemon(extras, environ, action):
661 # disable callback daemon for actions that don't require it
661 # disable callback daemon for actions that don't require it
662 direct_calls = True
662 direct_calls = True
663
663
664 return prepare_callback_daemon(
664 return prepare_callback_daemon(
665 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
665 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
666 use_direct_calls=direct_calls, txn_id=txn_id)
666 host=vcs_settings.HOOKS_HOST, use_direct_calls=direct_calls, txn_id=txn_id)
667
667
668
668
669 def _should_check_locking(query_string):
669 def _should_check_locking(query_string):
670 # this is kind of hacky, but due to how mercurial handles client-server
670 # this is kind of hacky, but due to how mercurial handles client-server
671 # server see all operation on commit; bookmarks, phases and
671 # server see all operation on commit; bookmarks, phases and
672 # obsolescence marker in different transaction, we don't want to check
672 # obsolescence marker in different transaction, we don't want to check
673 # locking on those
673 # locking on those
674 return query_string not in ['cmd=listkeys']
674 return query_string not in ['cmd=listkeys']
@@ -1,66 +1,67 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Internal settings for vcs-lib
22 Internal settings for vcs-lib
23 """
23 """
24
24
25 # list of default encoding used in safe_unicode/safe_str methods
25 # list of default encoding used in safe_unicode/safe_str methods
26 DEFAULT_ENCODINGS = ['utf8']
26 DEFAULT_ENCODINGS = ['utf8']
27
27
28 # Optional arguments to rev-filter, it has to be a list
28 # Optional arguments to rev-filter, it has to be a list
29 # It can also be ['--branches', '--tags']
29 # It can also be ['--branches', '--tags']
30 GIT_REV_FILTER = ['--all']
30 GIT_REV_FILTER = ['--all']
31
31
32 # Compatibility version when creating SVN repositories. None means newest.
32 # Compatibility version when creating SVN repositories. None means newest.
33 # Other available options are: pre-1.4-compatible, pre-1.5-compatible,
33 # Other available options are: pre-1.4-compatible, pre-1.5-compatible,
34 # pre-1.6-compatible, pre-1.8-compatible
34 # pre-1.6-compatible, pre-1.8-compatible
35 SVN_COMPATIBLE_VERSION = None
35 SVN_COMPATIBLE_VERSION = None
36
36
37 ALIASES = ['hg', 'git', 'svn']
37 ALIASES = ['hg', 'git', 'svn']
38
38
39 BACKENDS = {
39 BACKENDS = {
40 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
40 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
41 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
41 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
42 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
42 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
43 }
43 }
44
44
45 # TODO: Remove once controllers/files.py is adjusted
45 # TODO: Remove once controllers/files.py is adjusted
46 ARCHIVE_SPECS = {
46 ARCHIVE_SPECS = {
47 'tbz2': ('application/x-bzip2', '.tar.bz2'),
47 'tbz2': ('application/x-bzip2', '.tar.bz2'),
48 'tgz': ('application/x-gzip', '.tar.gz'),
48 'tgz': ('application/x-gzip', '.tar.gz'),
49 'zip': ('application/zip', '.zip'),
49 'zip': ('application/zip', '.zip'),
50 }
50 }
51
51
52 HOOKS_PROTOCOL = None
52 HOOKS_PROTOCOL = None
53 HOOKS_DIRECT_CALLS = False
53 HOOKS_DIRECT_CALLS = False
54 HOOKS_HOST = '127.0.0.1'
54
55
55
56
56 def available_aliases():
57 def available_aliases():
57 """
58 """
58 Mercurial is required for the system to work, so in case vcs.backends does
59 Mercurial is required for the system to work, so in case vcs.backends does
59 not include it, we make sure it will be available internally
60 not include it, we make sure it will be available internally
60 TODO: anderson: refactor vcs.backends so it won't be necessary, VCS server
61 TODO: anderson: refactor vcs.backends so it won't be necessary, VCS server
61 should be responsible to dictate available backends.
62 should be responsible to dictate available backends.
62 """
63 """
63 aliases = ALIASES[:]
64 aliases = ALIASES[:]
64 if 'hg' not in aliases:
65 if 'hg' not in aliases:
65 aliases += ['hg']
66 aliases += ['hg']
66 return aliases
67 return aliases
@@ -1,1700 +1,1701 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2018 RhodeCode GmbH
3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid.threadlocal import get_current_request
33 from pyramid.threadlocal import get_current_request
34
34
35 from rhodecode import events
35 from rhodecode import events
36 from rhodecode.translation import lazy_ugettext#, _
36 from rhodecode.translation import lazy_ugettext#, _
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import audit_logger
38 from rhodecode.lib import audit_logger
39 from rhodecode.lib.compat import OrderedDict
39 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.markup_renderer import (
41 from rhodecode.lib.markup_renderer import (
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.vcs.backends.base import (
44 from rhodecode.lib.vcs.backends.base import (
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.exceptions import (
47 from rhodecode.lib.vcs.exceptions import (
48 CommitDoesNotExistError, EmptyRepositoryError)
48 CommitDoesNotExistError, EmptyRepositoryError)
49 from rhodecode.model import BaseModel
49 from rhodecode.model import BaseModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.comment import CommentsModel
51 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.db import (
52 from rhodecode.model.db import (
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56 from rhodecode.model.notification import NotificationModel, \
56 from rhodecode.model.notification import NotificationModel, \
57 EmailNotificationModel
57 EmailNotificationModel
58 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.settings import VcsSettingsModel
59 from rhodecode.model.settings import VcsSettingsModel
60
60
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64
64
65 # Data structure to hold the response data when updating commits during a pull
65 # Data structure to hold the response data when updating commits during a pull
66 # request update.
66 # request update.
67 UpdateResponse = collections.namedtuple('UpdateResponse', [
67 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 'executed', 'reason', 'new', 'old', 'changes',
68 'executed', 'reason', 'new', 'old', 'changes',
69 'source_changed', 'target_changed'])
69 'source_changed', 'target_changed'])
70
70
71
71
72 class PullRequestModel(BaseModel):
72 class PullRequestModel(BaseModel):
73
73
74 cls = PullRequest
74 cls = PullRequest
75
75
76 DIFF_CONTEXT = 3
76 DIFF_CONTEXT = 3
77
77
78 MERGE_STATUS_MESSAGES = {
78 MERGE_STATUS_MESSAGES = {
79 MergeFailureReason.NONE: lazy_ugettext(
79 MergeFailureReason.NONE: lazy_ugettext(
80 'This pull request can be automatically merged.'),
80 'This pull request can be automatically merged.'),
81 MergeFailureReason.UNKNOWN: lazy_ugettext(
81 MergeFailureReason.UNKNOWN: lazy_ugettext(
82 'This pull request cannot be merged because of an unhandled'
82 'This pull request cannot be merged because of an unhandled'
83 ' exception.'),
83 ' exception.'),
84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
85 'This pull request cannot be merged because of merge conflicts.'),
85 'This pull request cannot be merged because of merge conflicts.'),
86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
87 'This pull request could not be merged because push to target'
87 'This pull request could not be merged because push to target'
88 ' failed.'),
88 ' failed.'),
89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
90 'This pull request cannot be merged because the target is not a'
90 'This pull request cannot be merged because the target is not a'
91 ' head.'),
91 ' head.'),
92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
93 'This pull request cannot be merged because the source contains'
93 'This pull request cannot be merged because the source contains'
94 ' more branches than the target.'),
94 ' more branches than the target.'),
95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
96 'This pull request cannot be merged because the target has'
96 'This pull request cannot be merged because the target has'
97 ' multiple heads.'),
97 ' multiple heads.'),
98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
99 'This pull request cannot be merged because the target repository'
99 'This pull request cannot be merged because the target repository'
100 ' is locked.'),
100 ' is locked.'),
101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
102 'This pull request cannot be merged because the target or the '
102 'This pull request cannot be merged because the target or the '
103 'source reference is missing.'),
103 'source reference is missing.'),
104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
105 'This pull request cannot be merged because the target '
105 'This pull request cannot be merged because the target '
106 'reference is missing.'),
106 'reference is missing.'),
107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
108 'This pull request cannot be merged because the source '
108 'This pull request cannot be merged because the source '
109 'reference is missing.'),
109 'reference is missing.'),
110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
111 'This pull request cannot be merged because of conflicts related '
111 'This pull request cannot be merged because of conflicts related '
112 'to sub repositories.'),
112 'to sub repositories.'),
113 }
113 }
114
114
115 UPDATE_STATUS_MESSAGES = {
115 UPDATE_STATUS_MESSAGES = {
116 UpdateFailureReason.NONE: lazy_ugettext(
116 UpdateFailureReason.NONE: lazy_ugettext(
117 'Pull request update successful.'),
117 'Pull request update successful.'),
118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
119 'Pull request update failed because of an unknown error.'),
119 'Pull request update failed because of an unknown error.'),
120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
121 'No update needed because the source and target have not changed.'),
121 'No update needed because the source and target have not changed.'),
122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
123 'Pull request cannot be updated because the reference type is '
123 'Pull request cannot be updated because the reference type is '
124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
126 'This pull request cannot be updated because the target '
126 'This pull request cannot be updated because the target '
127 'reference is missing.'),
127 'reference is missing.'),
128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
129 'This pull request cannot be updated because the source '
129 'This pull request cannot be updated because the source '
130 'reference is missing.'),
130 'reference is missing.'),
131 }
131 }
132
132
133 def __get_pull_request(self, pull_request):
133 def __get_pull_request(self, pull_request):
134 return self._get_instance((
134 return self._get_instance((
135 PullRequest, PullRequestVersion), pull_request)
135 PullRequest, PullRequestVersion), pull_request)
136
136
137 def _check_perms(self, perms, pull_request, user, api=False):
137 def _check_perms(self, perms, pull_request, user, api=False):
138 if not api:
138 if not api:
139 return h.HasRepoPermissionAny(*perms)(
139 return h.HasRepoPermissionAny(*perms)(
140 user=user, repo_name=pull_request.target_repo.repo_name)
140 user=user, repo_name=pull_request.target_repo.repo_name)
141 else:
141 else:
142 return h.HasRepoPermissionAnyApi(*perms)(
142 return h.HasRepoPermissionAnyApi(*perms)(
143 user=user, repo_name=pull_request.target_repo.repo_name)
143 user=user, repo_name=pull_request.target_repo.repo_name)
144
144
145 def check_user_read(self, pull_request, user, api=False):
145 def check_user_read(self, pull_request, user, api=False):
146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 return self._check_perms(_perms, pull_request, user, api)
147 return self._check_perms(_perms, pull_request, user, api)
148
148
149 def check_user_merge(self, pull_request, user, api=False):
149 def check_user_merge(self, pull_request, user, api=False):
150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 return self._check_perms(_perms, pull_request, user, api)
151 return self._check_perms(_perms, pull_request, user, api)
152
152
153 def check_user_update(self, pull_request, user, api=False):
153 def check_user_update(self, pull_request, user, api=False):
154 owner = user.user_id == pull_request.user_id
154 owner = user.user_id == pull_request.user_id
155 return self.check_user_merge(pull_request, user, api) or owner
155 return self.check_user_merge(pull_request, user, api) or owner
156
156
157 def check_user_delete(self, pull_request, user):
157 def check_user_delete(self, pull_request, user):
158 owner = user.user_id == pull_request.user_id
158 owner = user.user_id == pull_request.user_id
159 _perms = ('repository.admin',)
159 _perms = ('repository.admin',)
160 return self._check_perms(_perms, pull_request, user) or owner
160 return self._check_perms(_perms, pull_request, user) or owner
161
161
162 def check_user_change_status(self, pull_request, user, api=False):
162 def check_user_change_status(self, pull_request, user, api=False):
163 reviewer = user.user_id in [x.user_id for x in
163 reviewer = user.user_id in [x.user_id for x in
164 pull_request.reviewers]
164 pull_request.reviewers]
165 return self.check_user_update(pull_request, user, api) or reviewer
165 return self.check_user_update(pull_request, user, api) or reviewer
166
166
167 def check_user_comment(self, pull_request, user):
167 def check_user_comment(self, pull_request, user):
168 owner = user.user_id == pull_request.user_id
168 owner = user.user_id == pull_request.user_id
169 return self.check_user_read(pull_request, user) or owner
169 return self.check_user_read(pull_request, user) or owner
170
170
171 def get(self, pull_request):
171 def get(self, pull_request):
172 return self.__get_pull_request(pull_request)
172 return self.__get_pull_request(pull_request)
173
173
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 opened_by=None, order_by=None,
175 opened_by=None, order_by=None,
176 order_dir='desc'):
176 order_dir='desc'):
177 repo = None
177 repo = None
178 if repo_name:
178 if repo_name:
179 repo = self._get_repo(repo_name)
179 repo = self._get_repo(repo_name)
180
180
181 q = PullRequest.query()
181 q = PullRequest.query()
182
182
183 # source or target
183 # source or target
184 if repo and source:
184 if repo and source:
185 q = q.filter(PullRequest.source_repo == repo)
185 q = q.filter(PullRequest.source_repo == repo)
186 elif repo:
186 elif repo:
187 q = q.filter(PullRequest.target_repo == repo)
187 q = q.filter(PullRequest.target_repo == repo)
188
188
189 # closed,opened
189 # closed,opened
190 if statuses:
190 if statuses:
191 q = q.filter(PullRequest.status.in_(statuses))
191 q = q.filter(PullRequest.status.in_(statuses))
192
192
193 # opened by filter
193 # opened by filter
194 if opened_by:
194 if opened_by:
195 q = q.filter(PullRequest.user_id.in_(opened_by))
195 q = q.filter(PullRequest.user_id.in_(opened_by))
196
196
197 if order_by:
197 if order_by:
198 order_map = {
198 order_map = {
199 'name_raw': PullRequest.pull_request_id,
199 'name_raw': PullRequest.pull_request_id,
200 'title': PullRequest.title,
200 'title': PullRequest.title,
201 'updated_on_raw': PullRequest.updated_on,
201 'updated_on_raw': PullRequest.updated_on,
202 'target_repo': PullRequest.target_repo_id
202 'target_repo': PullRequest.target_repo_id
203 }
203 }
204 if order_dir == 'asc':
204 if order_dir == 'asc':
205 q = q.order_by(order_map[order_by].asc())
205 q = q.order_by(order_map[order_by].asc())
206 else:
206 else:
207 q = q.order_by(order_map[order_by].desc())
207 q = q.order_by(order_map[order_by].desc())
208
208
209 return q
209 return q
210
210
211 def count_all(self, repo_name, source=False, statuses=None,
211 def count_all(self, repo_name, source=False, statuses=None,
212 opened_by=None):
212 opened_by=None):
213 """
213 """
214 Count the number of pull requests for a specific repository.
214 Count the number of pull requests for a specific repository.
215
215
216 :param repo_name: target or source repo
216 :param repo_name: target or source repo
217 :param source: boolean flag to specify if repo_name refers to source
217 :param source: boolean flag to specify if repo_name refers to source
218 :param statuses: list of pull request statuses
218 :param statuses: list of pull request statuses
219 :param opened_by: author user of the pull request
219 :param opened_by: author user of the pull request
220 :returns: int number of pull requests
220 :returns: int number of pull requests
221 """
221 """
222 q = self._prepare_get_all_query(
222 q = self._prepare_get_all_query(
223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224
224
225 return q.count()
225 return q.count()
226
226
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 offset=0, length=None, order_by=None, order_dir='desc'):
228 offset=0, length=None, order_by=None, order_dir='desc'):
229 """
229 """
230 Get all pull requests for a specific repository.
230 Get all pull requests for a specific repository.
231
231
232 :param repo_name: target or source repo
232 :param repo_name: target or source repo
233 :param source: boolean flag to specify if repo_name refers to source
233 :param source: boolean flag to specify if repo_name refers to source
234 :param statuses: list of pull request statuses
234 :param statuses: list of pull request statuses
235 :param opened_by: author user of the pull request
235 :param opened_by: author user of the pull request
236 :param offset: pagination offset
236 :param offset: pagination offset
237 :param length: length of returned list
237 :param length: length of returned list
238 :param order_by: order of the returned list
238 :param order_by: order of the returned list
239 :param order_dir: 'asc' or 'desc' ordering direction
239 :param order_dir: 'asc' or 'desc' ordering direction
240 :returns: list of pull requests
240 :returns: list of pull requests
241 """
241 """
242 q = self._prepare_get_all_query(
242 q = self._prepare_get_all_query(
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 order_by=order_by, order_dir=order_dir)
244 order_by=order_by, order_dir=order_dir)
245
245
246 if length:
246 if length:
247 pull_requests = q.limit(length).offset(offset).all()
247 pull_requests = q.limit(length).offset(offset).all()
248 else:
248 else:
249 pull_requests = q.all()
249 pull_requests = q.all()
250
250
251 return pull_requests
251 return pull_requests
252
252
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 opened_by=None):
254 opened_by=None):
255 """
255 """
256 Count the number of pull requests for a specific repository that are
256 Count the number of pull requests for a specific repository that are
257 awaiting review.
257 awaiting review.
258
258
259 :param repo_name: target or source repo
259 :param repo_name: target or source repo
260 :param source: boolean flag to specify if repo_name refers to source
260 :param source: boolean flag to specify if repo_name refers to source
261 :param statuses: list of pull request statuses
261 :param statuses: list of pull request statuses
262 :param opened_by: author user of the pull request
262 :param opened_by: author user of the pull request
263 :returns: int number of pull requests
263 :returns: int number of pull requests
264 """
264 """
265 pull_requests = self.get_awaiting_review(
265 pull_requests = self.get_awaiting_review(
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267
267
268 return len(pull_requests)
268 return len(pull_requests)
269
269
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 opened_by=None, offset=0, length=None,
271 opened_by=None, offset=0, length=None,
272 order_by=None, order_dir='desc'):
272 order_by=None, order_dir='desc'):
273 """
273 """
274 Get all pull requests for a specific repository that are awaiting
274 Get all pull requests for a specific repository that are awaiting
275 review.
275 review.
276
276
277 :param repo_name: target or source repo
277 :param repo_name: target or source repo
278 :param source: boolean flag to specify if repo_name refers to source
278 :param source: boolean flag to specify if repo_name refers to source
279 :param statuses: list of pull request statuses
279 :param statuses: list of pull request statuses
280 :param opened_by: author user of the pull request
280 :param opened_by: author user of the pull request
281 :param offset: pagination offset
281 :param offset: pagination offset
282 :param length: length of returned list
282 :param length: length of returned list
283 :param order_by: order of the returned list
283 :param order_by: order of the returned list
284 :param order_dir: 'asc' or 'desc' ordering direction
284 :param order_dir: 'asc' or 'desc' ordering direction
285 :returns: list of pull requests
285 :returns: list of pull requests
286 """
286 """
287 pull_requests = self.get_all(
287 pull_requests = self.get_all(
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 order_by=order_by, order_dir=order_dir)
289 order_by=order_by, order_dir=order_dir)
290
290
291 _filtered_pull_requests = []
291 _filtered_pull_requests = []
292 for pr in pull_requests:
292 for pr in pull_requests:
293 status = pr.calculated_review_status()
293 status = pr.calculated_review_status()
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 _filtered_pull_requests.append(pr)
296 _filtered_pull_requests.append(pr)
297 if length:
297 if length:
298 return _filtered_pull_requests[offset:offset+length]
298 return _filtered_pull_requests[offset:offset+length]
299 else:
299 else:
300 return _filtered_pull_requests
300 return _filtered_pull_requests
301
301
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 opened_by=None, user_id=None):
303 opened_by=None, user_id=None):
304 """
304 """
305 Count the number of pull requests for a specific repository that are
305 Count the number of pull requests for a specific repository that are
306 awaiting review from a specific user.
306 awaiting review from a specific user.
307
307
308 :param repo_name: target or source repo
308 :param repo_name: target or source repo
309 :param source: boolean flag to specify if repo_name refers to source
309 :param source: boolean flag to specify if repo_name refers to source
310 :param statuses: list of pull request statuses
310 :param statuses: list of pull request statuses
311 :param opened_by: author user of the pull request
311 :param opened_by: author user of the pull request
312 :param user_id: reviewer user of the pull request
312 :param user_id: reviewer user of the pull request
313 :returns: int number of pull requests
313 :returns: int number of pull requests
314 """
314 """
315 pull_requests = self.get_awaiting_my_review(
315 pull_requests = self.get_awaiting_my_review(
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 user_id=user_id)
317 user_id=user_id)
318
318
319 return len(pull_requests)
319 return len(pull_requests)
320
320
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 opened_by=None, user_id=None, offset=0,
322 opened_by=None, user_id=None, offset=0,
323 length=None, order_by=None, order_dir='desc'):
323 length=None, order_by=None, order_dir='desc'):
324 """
324 """
325 Get all pull requests for a specific repository that are awaiting
325 Get all pull requests for a specific repository that are awaiting
326 review from a specific user.
326 review from a specific user.
327
327
328 :param repo_name: target or source repo
328 :param repo_name: target or source repo
329 :param source: boolean flag to specify if repo_name refers to source
329 :param source: boolean flag to specify if repo_name refers to source
330 :param statuses: list of pull request statuses
330 :param statuses: list of pull request statuses
331 :param opened_by: author user of the pull request
331 :param opened_by: author user of the pull request
332 :param user_id: reviewer user of the pull request
332 :param user_id: reviewer user of the pull request
333 :param offset: pagination offset
333 :param offset: pagination offset
334 :param length: length of returned list
334 :param length: length of returned list
335 :param order_by: order of the returned list
335 :param order_by: order of the returned list
336 :param order_dir: 'asc' or 'desc' ordering direction
336 :param order_dir: 'asc' or 'desc' ordering direction
337 :returns: list of pull requests
337 :returns: list of pull requests
338 """
338 """
339 pull_requests = self.get_all(
339 pull_requests = self.get_all(
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 order_by=order_by, order_dir=order_dir)
341 order_by=order_by, order_dir=order_dir)
342
342
343 _my = PullRequestModel().get_not_reviewed(user_id)
343 _my = PullRequestModel().get_not_reviewed(user_id)
344 my_participation = []
344 my_participation = []
345 for pr in pull_requests:
345 for pr in pull_requests:
346 if pr in _my:
346 if pr in _my:
347 my_participation.append(pr)
347 my_participation.append(pr)
348 _filtered_pull_requests = my_participation
348 _filtered_pull_requests = my_participation
349 if length:
349 if length:
350 return _filtered_pull_requests[offset:offset+length]
350 return _filtered_pull_requests[offset:offset+length]
351 else:
351 else:
352 return _filtered_pull_requests
352 return _filtered_pull_requests
353
353
354 def get_not_reviewed(self, user_id):
354 def get_not_reviewed(self, user_id):
355 return [
355 return [
356 x.pull_request for x in PullRequestReviewers.query().filter(
356 x.pull_request for x in PullRequestReviewers.query().filter(
357 PullRequestReviewers.user_id == user_id).all()
357 PullRequestReviewers.user_id == user_id).all()
358 ]
358 ]
359
359
360 def _prepare_participating_query(self, user_id=None, statuses=None,
360 def _prepare_participating_query(self, user_id=None, statuses=None,
361 order_by=None, order_dir='desc'):
361 order_by=None, order_dir='desc'):
362 q = PullRequest.query()
362 q = PullRequest.query()
363 if user_id:
363 if user_id:
364 reviewers_subquery = Session().query(
364 reviewers_subquery = Session().query(
365 PullRequestReviewers.pull_request_id).filter(
365 PullRequestReviewers.pull_request_id).filter(
366 PullRequestReviewers.user_id == user_id).subquery()
366 PullRequestReviewers.user_id == user_id).subquery()
367 user_filter = or_(
367 user_filter = or_(
368 PullRequest.user_id == user_id,
368 PullRequest.user_id == user_id,
369 PullRequest.pull_request_id.in_(reviewers_subquery)
369 PullRequest.pull_request_id.in_(reviewers_subquery)
370 )
370 )
371 q = PullRequest.query().filter(user_filter)
371 q = PullRequest.query().filter(user_filter)
372
372
373 # closed,opened
373 # closed,opened
374 if statuses:
374 if statuses:
375 q = q.filter(PullRequest.status.in_(statuses))
375 q = q.filter(PullRequest.status.in_(statuses))
376
376
377 if order_by:
377 if order_by:
378 order_map = {
378 order_map = {
379 'name_raw': PullRequest.pull_request_id,
379 'name_raw': PullRequest.pull_request_id,
380 'title': PullRequest.title,
380 'title': PullRequest.title,
381 'updated_on_raw': PullRequest.updated_on,
381 'updated_on_raw': PullRequest.updated_on,
382 'target_repo': PullRequest.target_repo_id
382 'target_repo': PullRequest.target_repo_id
383 }
383 }
384 if order_dir == 'asc':
384 if order_dir == 'asc':
385 q = q.order_by(order_map[order_by].asc())
385 q = q.order_by(order_map[order_by].asc())
386 else:
386 else:
387 q = q.order_by(order_map[order_by].desc())
387 q = q.order_by(order_map[order_by].desc())
388
388
389 return q
389 return q
390
390
391 def count_im_participating_in(self, user_id=None, statuses=None):
391 def count_im_participating_in(self, user_id=None, statuses=None):
392 q = self._prepare_participating_query(user_id, statuses=statuses)
392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 return q.count()
393 return q.count()
394
394
395 def get_im_participating_in(
395 def get_im_participating_in(
396 self, user_id=None, statuses=None, offset=0,
396 self, user_id=None, statuses=None, offset=0,
397 length=None, order_by=None, order_dir='desc'):
397 length=None, order_by=None, order_dir='desc'):
398 """
398 """
399 Get all Pull requests that i'm participating in, or i have opened
399 Get all Pull requests that i'm participating in, or i have opened
400 """
400 """
401
401
402 q = self._prepare_participating_query(
402 q = self._prepare_participating_query(
403 user_id, statuses=statuses, order_by=order_by,
403 user_id, statuses=statuses, order_by=order_by,
404 order_dir=order_dir)
404 order_dir=order_dir)
405
405
406 if length:
406 if length:
407 pull_requests = q.limit(length).offset(offset).all()
407 pull_requests = q.limit(length).offset(offset).all()
408 else:
408 else:
409 pull_requests = q.all()
409 pull_requests = q.all()
410
410
411 return pull_requests
411 return pull_requests
412
412
413 def get_versions(self, pull_request):
413 def get_versions(self, pull_request):
414 """
414 """
415 returns version of pull request sorted by ID descending
415 returns version of pull request sorted by ID descending
416 """
416 """
417 return PullRequestVersion.query()\
417 return PullRequestVersion.query()\
418 .filter(PullRequestVersion.pull_request == pull_request)\
418 .filter(PullRequestVersion.pull_request == pull_request)\
419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
420 .all()
420 .all()
421
421
422 def get_pr_version(self, pull_request_id, version=None):
422 def get_pr_version(self, pull_request_id, version=None):
423 at_version = None
423 at_version = None
424
424
425 if version and version == 'latest':
425 if version and version == 'latest':
426 pull_request_ver = PullRequest.get(pull_request_id)
426 pull_request_ver = PullRequest.get(pull_request_id)
427 pull_request_obj = pull_request_ver
427 pull_request_obj = pull_request_ver
428 _org_pull_request_obj = pull_request_obj
428 _org_pull_request_obj = pull_request_obj
429 at_version = 'latest'
429 at_version = 'latest'
430 elif version:
430 elif version:
431 pull_request_ver = PullRequestVersion.get_or_404(version)
431 pull_request_ver = PullRequestVersion.get_or_404(version)
432 pull_request_obj = pull_request_ver
432 pull_request_obj = pull_request_ver
433 _org_pull_request_obj = pull_request_ver.pull_request
433 _org_pull_request_obj = pull_request_ver.pull_request
434 at_version = pull_request_ver.pull_request_version_id
434 at_version = pull_request_ver.pull_request_version_id
435 else:
435 else:
436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
437 pull_request_id)
437 pull_request_id)
438
438
439 pull_request_display_obj = PullRequest.get_pr_display_object(
439 pull_request_display_obj = PullRequest.get_pr_display_object(
440 pull_request_obj, _org_pull_request_obj)
440 pull_request_obj, _org_pull_request_obj)
441
441
442 return _org_pull_request_obj, pull_request_obj, \
442 return _org_pull_request_obj, pull_request_obj, \
443 pull_request_display_obj, at_version
443 pull_request_display_obj, at_version
444
444
445 def create(self, created_by, source_repo, source_ref, target_repo,
445 def create(self, created_by, source_repo, source_ref, target_repo,
446 target_ref, revisions, reviewers, title, description=None,
446 target_ref, revisions, reviewers, title, description=None,
447 reviewer_data=None, translator=None, auth_user=None):
447 reviewer_data=None, translator=None, auth_user=None):
448 translator = translator or get_current_request().translate
448 translator = translator or get_current_request().translate
449
449
450 created_by_user = self._get_user(created_by)
450 created_by_user = self._get_user(created_by)
451 auth_user = auth_user or created_by_user
451 auth_user = auth_user or created_by_user
452 source_repo = self._get_repo(source_repo)
452 source_repo = self._get_repo(source_repo)
453 target_repo = self._get_repo(target_repo)
453 target_repo = self._get_repo(target_repo)
454
454
455 pull_request = PullRequest()
455 pull_request = PullRequest()
456 pull_request.source_repo = source_repo
456 pull_request.source_repo = source_repo
457 pull_request.source_ref = source_ref
457 pull_request.source_ref = source_ref
458 pull_request.target_repo = target_repo
458 pull_request.target_repo = target_repo
459 pull_request.target_ref = target_ref
459 pull_request.target_ref = target_ref
460 pull_request.revisions = revisions
460 pull_request.revisions = revisions
461 pull_request.title = title
461 pull_request.title = title
462 pull_request.description = description
462 pull_request.description = description
463 pull_request.author = created_by_user
463 pull_request.author = created_by_user
464 pull_request.reviewer_data = reviewer_data
464 pull_request.reviewer_data = reviewer_data
465
465
466 Session().add(pull_request)
466 Session().add(pull_request)
467 Session().flush()
467 Session().flush()
468
468
469 reviewer_ids = set()
469 reviewer_ids = set()
470 # members / reviewers
470 # members / reviewers
471 for reviewer_object in reviewers:
471 for reviewer_object in reviewers:
472 user_id, reasons, mandatory, rules = reviewer_object
472 user_id, reasons, mandatory, rules = reviewer_object
473 user = self._get_user(user_id)
473 user = self._get_user(user_id)
474
474
475 # skip duplicates
475 # skip duplicates
476 if user.user_id in reviewer_ids:
476 if user.user_id in reviewer_ids:
477 continue
477 continue
478
478
479 reviewer_ids.add(user.user_id)
479 reviewer_ids.add(user.user_id)
480
480
481 reviewer = PullRequestReviewers()
481 reviewer = PullRequestReviewers()
482 reviewer.user = user
482 reviewer.user = user
483 reviewer.pull_request = pull_request
483 reviewer.pull_request = pull_request
484 reviewer.reasons = reasons
484 reviewer.reasons = reasons
485 reviewer.mandatory = mandatory
485 reviewer.mandatory = mandatory
486
486
487 # NOTE(marcink): pick only first rule for now
487 # NOTE(marcink): pick only first rule for now
488 rule_id = rules[0] if rules else None
488 rule_id = rules[0] if rules else None
489 rule = RepoReviewRule.get(rule_id) if rule_id else None
489 rule = RepoReviewRule.get(rule_id) if rule_id else None
490 if rule:
490 if rule:
491 review_group = rule.user_group_vote_rule()
491 review_group = rule.user_group_vote_rule()
492 if review_group:
492 if review_group:
493 # NOTE(marcink):
493 # NOTE(marcink):
494 # again, can be that user is member of more,
494 # again, can be that user is member of more,
495 # but we pick the first same, as default reviewers algo
495 # but we pick the first same, as default reviewers algo
496 review_group = review_group[0]
496 review_group = review_group[0]
497
497
498 rule_data = {
498 rule_data = {
499 'rule_name':
499 'rule_name':
500 rule.review_rule_name,
500 rule.review_rule_name,
501 'rule_user_group_entry_id':
501 'rule_user_group_entry_id':
502 review_group.repo_review_rule_users_group_id,
502 review_group.repo_review_rule_users_group_id,
503 'rule_user_group_name':
503 'rule_user_group_name':
504 review_group.users_group.users_group_name,
504 review_group.users_group.users_group_name,
505 'rule_user_group_members':
505 'rule_user_group_members':
506 [x.user.username for x in review_group.users_group.members],
506 [x.user.username for x in review_group.users_group.members],
507 }
507 }
508 # e.g {'vote_rule': -1, 'mandatory': True}
508 # e.g {'vote_rule': -1, 'mandatory': True}
509 rule_data.update(review_group.rule_data())
509 rule_data.update(review_group.rule_data())
510
510
511 reviewer.rule_data = rule_data
511 reviewer.rule_data = rule_data
512
512
513 Session().add(reviewer)
513 Session().add(reviewer)
514 Session().flush()
514 Session().flush()
515
515
516 # Set approval status to "Under Review" for all commits which are
516 # Set approval status to "Under Review" for all commits which are
517 # part of this pull request.
517 # part of this pull request.
518 ChangesetStatusModel().set_status(
518 ChangesetStatusModel().set_status(
519 repo=target_repo,
519 repo=target_repo,
520 status=ChangesetStatus.STATUS_UNDER_REVIEW,
520 status=ChangesetStatus.STATUS_UNDER_REVIEW,
521 user=created_by_user,
521 user=created_by_user,
522 pull_request=pull_request
522 pull_request=pull_request
523 )
523 )
524 # we commit early at this point. This has to do with a fact
524 # we commit early at this point. This has to do with a fact
525 # that before queries do some row-locking. And because of that
525 # that before queries do some row-locking. And because of that
526 # we need to commit and finish transation before below validate call
526 # we need to commit and finish transation before below validate call
527 # that for large repos could be long resulting in long row locks
527 # that for large repos could be long resulting in long row locks
528 Session().commit()
528 Session().commit()
529
529
530 # prepare workspace, and run initial merge simulation
530 # prepare workspace, and run initial merge simulation
531 MergeCheck.validate(
531 MergeCheck.validate(
532 pull_request, user=created_by_user, translator=translator)
532 pull_request, user=created_by_user, translator=translator)
533
533
534 self.notify_reviewers(pull_request, reviewer_ids)
534 self.notify_reviewers(pull_request, reviewer_ids)
535 self._trigger_pull_request_hook(
535 self._trigger_pull_request_hook(
536 pull_request, created_by_user, 'create')
536 pull_request, created_by_user, 'create')
537
537
538 creation_data = pull_request.get_api_data(with_merge_state=False)
538 creation_data = pull_request.get_api_data(with_merge_state=False)
539 self._log_audit_action(
539 self._log_audit_action(
540 'repo.pull_request.create', {'data': creation_data},
540 'repo.pull_request.create', {'data': creation_data},
541 auth_user, pull_request)
541 auth_user, pull_request)
542
542
543 return pull_request
543 return pull_request
544
544
545 def _trigger_pull_request_hook(self, pull_request, user, action):
545 def _trigger_pull_request_hook(self, pull_request, user, action):
546 pull_request = self.__get_pull_request(pull_request)
546 pull_request = self.__get_pull_request(pull_request)
547 target_scm = pull_request.target_repo.scm_instance()
547 target_scm = pull_request.target_repo.scm_instance()
548 if action == 'create':
548 if action == 'create':
549 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
549 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
550 elif action == 'merge':
550 elif action == 'merge':
551 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
551 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
552 elif action == 'close':
552 elif action == 'close':
553 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
553 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
554 elif action == 'review_status_change':
554 elif action == 'review_status_change':
555 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
555 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
556 elif action == 'update':
556 elif action == 'update':
557 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
557 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
558 else:
558 else:
559 return
559 return
560
560
561 trigger_hook(
561 trigger_hook(
562 username=user.username,
562 username=user.username,
563 repo_name=pull_request.target_repo.repo_name,
563 repo_name=pull_request.target_repo.repo_name,
564 repo_alias=target_scm.alias,
564 repo_alias=target_scm.alias,
565 pull_request=pull_request)
565 pull_request=pull_request)
566
566
567 def _get_commit_ids(self, pull_request):
567 def _get_commit_ids(self, pull_request):
568 """
568 """
569 Return the commit ids of the merged pull request.
569 Return the commit ids of the merged pull request.
570
570
571 This method is not dealing correctly yet with the lack of autoupdates
571 This method is not dealing correctly yet with the lack of autoupdates
572 nor with the implicit target updates.
572 nor with the implicit target updates.
573 For example: if a commit in the source repo is already in the target it
573 For example: if a commit in the source repo is already in the target it
574 will be reported anyways.
574 will be reported anyways.
575 """
575 """
576 merge_rev = pull_request.merge_rev
576 merge_rev = pull_request.merge_rev
577 if merge_rev is None:
577 if merge_rev is None:
578 raise ValueError('This pull request was not merged yet')
578 raise ValueError('This pull request was not merged yet')
579
579
580 commit_ids = list(pull_request.revisions)
580 commit_ids = list(pull_request.revisions)
581 if merge_rev not in commit_ids:
581 if merge_rev not in commit_ids:
582 commit_ids.append(merge_rev)
582 commit_ids.append(merge_rev)
583
583
584 return commit_ids
584 return commit_ids
585
585
586 def merge_repo(self, pull_request, user, extras):
586 def merge_repo(self, pull_request, user, extras):
587 log.debug("Merging pull request %s", pull_request.pull_request_id)
587 log.debug("Merging pull request %s", pull_request.pull_request_id)
588 merge_state = self._merge_pull_request(pull_request, user, extras)
588 merge_state = self._merge_pull_request(pull_request, user, extras)
589 if merge_state.executed:
589 if merge_state.executed:
590 log.debug(
590 log.debug(
591 "Merge was successful, updating the pull request comments.")
591 "Merge was successful, updating the pull request comments.")
592 self._comment_and_close_pr(pull_request, user, merge_state)
592 self._comment_and_close_pr(pull_request, user, merge_state)
593
593
594 self._log_audit_action(
594 self._log_audit_action(
595 'repo.pull_request.merge',
595 'repo.pull_request.merge',
596 {'merge_state': merge_state.__dict__},
596 {'merge_state': merge_state.__dict__},
597 user, pull_request)
597 user, pull_request)
598
598
599 else:
599 else:
600 log.warn("Merge failed, not updating the pull request.")
600 log.warn("Merge failed, not updating the pull request.")
601 return merge_state
601 return merge_state
602
602
603 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
603 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
604 target_vcs = pull_request.target_repo.scm_instance()
604 target_vcs = pull_request.target_repo.scm_instance()
605 source_vcs = pull_request.source_repo.scm_instance()
605 source_vcs = pull_request.source_repo.scm_instance()
606 target_ref = self._refresh_reference(
606 target_ref = self._refresh_reference(
607 pull_request.target_ref_parts, target_vcs)
607 pull_request.target_ref_parts, target_vcs)
608
608
609 message = merge_msg or (
609 message = merge_msg or (
610 'Merge pull request #%(pr_id)s from '
610 'Merge pull request #%(pr_id)s from '
611 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
611 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
612 'pr_id': pull_request.pull_request_id,
612 'pr_id': pull_request.pull_request_id,
613 'source_repo': source_vcs.name,
613 'source_repo': source_vcs.name,
614 'source_ref_name': pull_request.source_ref_parts.name,
614 'source_ref_name': pull_request.source_ref_parts.name,
615 'pr_title': pull_request.title
615 'pr_title': pull_request.title
616 }
616 }
617
617
618 workspace_id = self._workspace_id(pull_request)
618 workspace_id = self._workspace_id(pull_request)
619 repo_id = pull_request.target_repo.repo_id
619 repo_id = pull_request.target_repo.repo_id
620 use_rebase = self._use_rebase_for_merging(pull_request)
620 use_rebase = self._use_rebase_for_merging(pull_request)
621 close_branch = self._close_branch_before_merging(pull_request)
621 close_branch = self._close_branch_before_merging(pull_request)
622
622
623 callback_daemon, extras = prepare_callback_daemon(
623 callback_daemon, extras = prepare_callback_daemon(
624 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
624 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
625 host=vcs_settings.HOOKS_HOST,
625 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
626 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
626
627
627 with callback_daemon:
628 with callback_daemon:
628 # TODO: johbo: Implement a clean way to run a config_override
629 # TODO: johbo: Implement a clean way to run a config_override
629 # for a single call.
630 # for a single call.
630 target_vcs.config.set(
631 target_vcs.config.set(
631 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
632 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
632 merge_state = target_vcs.merge(
633 merge_state = target_vcs.merge(
633 repo_id, workspace_id, target_ref, source_vcs,
634 repo_id, workspace_id, target_ref, source_vcs,
634 pull_request.source_ref_parts,
635 pull_request.source_ref_parts,
635 user_name=user.username, user_email=user.email,
636 user_name=user.username, user_email=user.email,
636 message=message, use_rebase=use_rebase,
637 message=message, use_rebase=use_rebase,
637 close_branch=close_branch)
638 close_branch=close_branch)
638 return merge_state
639 return merge_state
639
640
640 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
641 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
641 pull_request.merge_rev = merge_state.merge_ref.commit_id
642 pull_request.merge_rev = merge_state.merge_ref.commit_id
642 pull_request.updated_on = datetime.datetime.now()
643 pull_request.updated_on = datetime.datetime.now()
643 close_msg = close_msg or 'Pull request merged and closed'
644 close_msg = close_msg or 'Pull request merged and closed'
644
645
645 CommentsModel().create(
646 CommentsModel().create(
646 text=safe_unicode(close_msg),
647 text=safe_unicode(close_msg),
647 repo=pull_request.target_repo.repo_id,
648 repo=pull_request.target_repo.repo_id,
648 user=user.user_id,
649 user=user.user_id,
649 pull_request=pull_request.pull_request_id,
650 pull_request=pull_request.pull_request_id,
650 f_path=None,
651 f_path=None,
651 line_no=None,
652 line_no=None,
652 closing_pr=True
653 closing_pr=True
653 )
654 )
654
655
655 Session().add(pull_request)
656 Session().add(pull_request)
656 Session().flush()
657 Session().flush()
657 # TODO: paris: replace invalidation with less radical solution
658 # TODO: paris: replace invalidation with less radical solution
658 ScmModel().mark_for_invalidation(
659 ScmModel().mark_for_invalidation(
659 pull_request.target_repo.repo_name)
660 pull_request.target_repo.repo_name)
660 self._trigger_pull_request_hook(pull_request, user, 'merge')
661 self._trigger_pull_request_hook(pull_request, user, 'merge')
661
662
662 def has_valid_update_type(self, pull_request):
663 def has_valid_update_type(self, pull_request):
663 source_ref_type = pull_request.source_ref_parts.type
664 source_ref_type = pull_request.source_ref_parts.type
664 return source_ref_type in ['book', 'branch', 'tag']
665 return source_ref_type in ['book', 'branch', 'tag']
665
666
666 def update_commits(self, pull_request):
667 def update_commits(self, pull_request):
667 """
668 """
668 Get the updated list of commits for the pull request
669 Get the updated list of commits for the pull request
669 and return the new pull request version and the list
670 and return the new pull request version and the list
670 of commits processed by this update action
671 of commits processed by this update action
671 """
672 """
672 pull_request = self.__get_pull_request(pull_request)
673 pull_request = self.__get_pull_request(pull_request)
673 source_ref_type = pull_request.source_ref_parts.type
674 source_ref_type = pull_request.source_ref_parts.type
674 source_ref_name = pull_request.source_ref_parts.name
675 source_ref_name = pull_request.source_ref_parts.name
675 source_ref_id = pull_request.source_ref_parts.commit_id
676 source_ref_id = pull_request.source_ref_parts.commit_id
676
677
677 target_ref_type = pull_request.target_ref_parts.type
678 target_ref_type = pull_request.target_ref_parts.type
678 target_ref_name = pull_request.target_ref_parts.name
679 target_ref_name = pull_request.target_ref_parts.name
679 target_ref_id = pull_request.target_ref_parts.commit_id
680 target_ref_id = pull_request.target_ref_parts.commit_id
680
681
681 if not self.has_valid_update_type(pull_request):
682 if not self.has_valid_update_type(pull_request):
682 log.debug(
683 log.debug(
683 "Skipping update of pull request %s due to ref type: %s",
684 "Skipping update of pull request %s due to ref type: %s",
684 pull_request, source_ref_type)
685 pull_request, source_ref_type)
685 return UpdateResponse(
686 return UpdateResponse(
686 executed=False,
687 executed=False,
687 reason=UpdateFailureReason.WRONG_REF_TYPE,
688 reason=UpdateFailureReason.WRONG_REF_TYPE,
688 old=pull_request, new=None, changes=None,
689 old=pull_request, new=None, changes=None,
689 source_changed=False, target_changed=False)
690 source_changed=False, target_changed=False)
690
691
691 # source repo
692 # source repo
692 source_repo = pull_request.source_repo.scm_instance()
693 source_repo = pull_request.source_repo.scm_instance()
693 try:
694 try:
694 source_commit = source_repo.get_commit(commit_id=source_ref_name)
695 source_commit = source_repo.get_commit(commit_id=source_ref_name)
695 except CommitDoesNotExistError:
696 except CommitDoesNotExistError:
696 return UpdateResponse(
697 return UpdateResponse(
697 executed=False,
698 executed=False,
698 reason=UpdateFailureReason.MISSING_SOURCE_REF,
699 reason=UpdateFailureReason.MISSING_SOURCE_REF,
699 old=pull_request, new=None, changes=None,
700 old=pull_request, new=None, changes=None,
700 source_changed=False, target_changed=False)
701 source_changed=False, target_changed=False)
701
702
702 source_changed = source_ref_id != source_commit.raw_id
703 source_changed = source_ref_id != source_commit.raw_id
703
704
704 # target repo
705 # target repo
705 target_repo = pull_request.target_repo.scm_instance()
706 target_repo = pull_request.target_repo.scm_instance()
706 try:
707 try:
707 target_commit = target_repo.get_commit(commit_id=target_ref_name)
708 target_commit = target_repo.get_commit(commit_id=target_ref_name)
708 except CommitDoesNotExistError:
709 except CommitDoesNotExistError:
709 return UpdateResponse(
710 return UpdateResponse(
710 executed=False,
711 executed=False,
711 reason=UpdateFailureReason.MISSING_TARGET_REF,
712 reason=UpdateFailureReason.MISSING_TARGET_REF,
712 old=pull_request, new=None, changes=None,
713 old=pull_request, new=None, changes=None,
713 source_changed=False, target_changed=False)
714 source_changed=False, target_changed=False)
714 target_changed = target_ref_id != target_commit.raw_id
715 target_changed = target_ref_id != target_commit.raw_id
715
716
716 if not (source_changed or target_changed):
717 if not (source_changed or target_changed):
717 log.debug("Nothing changed in pull request %s", pull_request)
718 log.debug("Nothing changed in pull request %s", pull_request)
718 return UpdateResponse(
719 return UpdateResponse(
719 executed=False,
720 executed=False,
720 reason=UpdateFailureReason.NO_CHANGE,
721 reason=UpdateFailureReason.NO_CHANGE,
721 old=pull_request, new=None, changes=None,
722 old=pull_request, new=None, changes=None,
722 source_changed=target_changed, target_changed=source_changed)
723 source_changed=target_changed, target_changed=source_changed)
723
724
724 change_in_found = 'target repo' if target_changed else 'source repo'
725 change_in_found = 'target repo' if target_changed else 'source repo'
725 log.debug('Updating pull request because of change in %s detected',
726 log.debug('Updating pull request because of change in %s detected',
726 change_in_found)
727 change_in_found)
727
728
728 # Finally there is a need for an update, in case of source change
729 # Finally there is a need for an update, in case of source change
729 # we create a new version, else just an update
730 # we create a new version, else just an update
730 if source_changed:
731 if source_changed:
731 pull_request_version = self._create_version_from_snapshot(pull_request)
732 pull_request_version = self._create_version_from_snapshot(pull_request)
732 self._link_comments_to_version(pull_request_version)
733 self._link_comments_to_version(pull_request_version)
733 else:
734 else:
734 try:
735 try:
735 ver = pull_request.versions[-1]
736 ver = pull_request.versions[-1]
736 except IndexError:
737 except IndexError:
737 ver = None
738 ver = None
738
739
739 pull_request.pull_request_version_id = \
740 pull_request.pull_request_version_id = \
740 ver.pull_request_version_id if ver else None
741 ver.pull_request_version_id if ver else None
741 pull_request_version = pull_request
742 pull_request_version = pull_request
742
743
743 try:
744 try:
744 if target_ref_type in ('tag', 'branch', 'book'):
745 if target_ref_type in ('tag', 'branch', 'book'):
745 target_commit = target_repo.get_commit(target_ref_name)
746 target_commit = target_repo.get_commit(target_ref_name)
746 else:
747 else:
747 target_commit = target_repo.get_commit(target_ref_id)
748 target_commit = target_repo.get_commit(target_ref_id)
748 except CommitDoesNotExistError:
749 except CommitDoesNotExistError:
749 return UpdateResponse(
750 return UpdateResponse(
750 executed=False,
751 executed=False,
751 reason=UpdateFailureReason.MISSING_TARGET_REF,
752 reason=UpdateFailureReason.MISSING_TARGET_REF,
752 old=pull_request, new=None, changes=None,
753 old=pull_request, new=None, changes=None,
753 source_changed=source_changed, target_changed=target_changed)
754 source_changed=source_changed, target_changed=target_changed)
754
755
755 # re-compute commit ids
756 # re-compute commit ids
756 old_commit_ids = pull_request.revisions
757 old_commit_ids = pull_request.revisions
757 pre_load = ["author", "branch", "date", "message"]
758 pre_load = ["author", "branch", "date", "message"]
758 commit_ranges = target_repo.compare(
759 commit_ranges = target_repo.compare(
759 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
760 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
760 pre_load=pre_load)
761 pre_load=pre_load)
761
762
762 ancestor = target_repo.get_common_ancestor(
763 ancestor = target_repo.get_common_ancestor(
763 target_commit.raw_id, source_commit.raw_id, source_repo)
764 target_commit.raw_id, source_commit.raw_id, source_repo)
764
765
765 pull_request.source_ref = '%s:%s:%s' % (
766 pull_request.source_ref = '%s:%s:%s' % (
766 source_ref_type, source_ref_name, source_commit.raw_id)
767 source_ref_type, source_ref_name, source_commit.raw_id)
767 pull_request.target_ref = '%s:%s:%s' % (
768 pull_request.target_ref = '%s:%s:%s' % (
768 target_ref_type, target_ref_name, ancestor)
769 target_ref_type, target_ref_name, ancestor)
769
770
770 pull_request.revisions = [
771 pull_request.revisions = [
771 commit.raw_id for commit in reversed(commit_ranges)]
772 commit.raw_id for commit in reversed(commit_ranges)]
772 pull_request.updated_on = datetime.datetime.now()
773 pull_request.updated_on = datetime.datetime.now()
773 Session().add(pull_request)
774 Session().add(pull_request)
774 new_commit_ids = pull_request.revisions
775 new_commit_ids = pull_request.revisions
775
776
776 old_diff_data, new_diff_data = self._generate_update_diffs(
777 old_diff_data, new_diff_data = self._generate_update_diffs(
777 pull_request, pull_request_version)
778 pull_request, pull_request_version)
778
779
779 # calculate commit and file changes
780 # calculate commit and file changes
780 changes = self._calculate_commit_id_changes(
781 changes = self._calculate_commit_id_changes(
781 old_commit_ids, new_commit_ids)
782 old_commit_ids, new_commit_ids)
782 file_changes = self._calculate_file_changes(
783 file_changes = self._calculate_file_changes(
783 old_diff_data, new_diff_data)
784 old_diff_data, new_diff_data)
784
785
785 # set comments as outdated if DIFFS changed
786 # set comments as outdated if DIFFS changed
786 CommentsModel().outdate_comments(
787 CommentsModel().outdate_comments(
787 pull_request, old_diff_data=old_diff_data,
788 pull_request, old_diff_data=old_diff_data,
788 new_diff_data=new_diff_data)
789 new_diff_data=new_diff_data)
789
790
790 commit_changes = (changes.added or changes.removed)
791 commit_changes = (changes.added or changes.removed)
791 file_node_changes = (
792 file_node_changes = (
792 file_changes.added or file_changes.modified or file_changes.removed)
793 file_changes.added or file_changes.modified or file_changes.removed)
793 pr_has_changes = commit_changes or file_node_changes
794 pr_has_changes = commit_changes or file_node_changes
794
795
795 # Add an automatic comment to the pull request, in case
796 # Add an automatic comment to the pull request, in case
796 # anything has changed
797 # anything has changed
797 if pr_has_changes:
798 if pr_has_changes:
798 update_comment = CommentsModel().create(
799 update_comment = CommentsModel().create(
799 text=self._render_update_message(changes, file_changes),
800 text=self._render_update_message(changes, file_changes),
800 repo=pull_request.target_repo,
801 repo=pull_request.target_repo,
801 user=pull_request.author,
802 user=pull_request.author,
802 pull_request=pull_request,
803 pull_request=pull_request,
803 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
804 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
804
805
805 # Update status to "Under Review" for added commits
806 # Update status to "Under Review" for added commits
806 for commit_id in changes.added:
807 for commit_id in changes.added:
807 ChangesetStatusModel().set_status(
808 ChangesetStatusModel().set_status(
808 repo=pull_request.source_repo,
809 repo=pull_request.source_repo,
809 status=ChangesetStatus.STATUS_UNDER_REVIEW,
810 status=ChangesetStatus.STATUS_UNDER_REVIEW,
810 comment=update_comment,
811 comment=update_comment,
811 user=pull_request.author,
812 user=pull_request.author,
812 pull_request=pull_request,
813 pull_request=pull_request,
813 revision=commit_id)
814 revision=commit_id)
814
815
815 log.debug(
816 log.debug(
816 'Updated pull request %s, added_ids: %s, common_ids: %s, '
817 'Updated pull request %s, added_ids: %s, common_ids: %s, '
817 'removed_ids: %s', pull_request.pull_request_id,
818 'removed_ids: %s', pull_request.pull_request_id,
818 changes.added, changes.common, changes.removed)
819 changes.added, changes.common, changes.removed)
819 log.debug(
820 log.debug(
820 'Updated pull request with the following file changes: %s',
821 'Updated pull request with the following file changes: %s',
821 file_changes)
822 file_changes)
822
823
823 log.info(
824 log.info(
824 "Updated pull request %s from commit %s to commit %s, "
825 "Updated pull request %s from commit %s to commit %s, "
825 "stored new version %s of this pull request.",
826 "stored new version %s of this pull request.",
826 pull_request.pull_request_id, source_ref_id,
827 pull_request.pull_request_id, source_ref_id,
827 pull_request.source_ref_parts.commit_id,
828 pull_request.source_ref_parts.commit_id,
828 pull_request_version.pull_request_version_id)
829 pull_request_version.pull_request_version_id)
829 Session().commit()
830 Session().commit()
830 self._trigger_pull_request_hook(
831 self._trigger_pull_request_hook(
831 pull_request, pull_request.author, 'update')
832 pull_request, pull_request.author, 'update')
832
833
833 return UpdateResponse(
834 return UpdateResponse(
834 executed=True, reason=UpdateFailureReason.NONE,
835 executed=True, reason=UpdateFailureReason.NONE,
835 old=pull_request, new=pull_request_version, changes=changes,
836 old=pull_request, new=pull_request_version, changes=changes,
836 source_changed=source_changed, target_changed=target_changed)
837 source_changed=source_changed, target_changed=target_changed)
837
838
838 def _create_version_from_snapshot(self, pull_request):
839 def _create_version_from_snapshot(self, pull_request):
839 version = PullRequestVersion()
840 version = PullRequestVersion()
840 version.title = pull_request.title
841 version.title = pull_request.title
841 version.description = pull_request.description
842 version.description = pull_request.description
842 version.status = pull_request.status
843 version.status = pull_request.status
843 version.created_on = datetime.datetime.now()
844 version.created_on = datetime.datetime.now()
844 version.updated_on = pull_request.updated_on
845 version.updated_on = pull_request.updated_on
845 version.user_id = pull_request.user_id
846 version.user_id = pull_request.user_id
846 version.source_repo = pull_request.source_repo
847 version.source_repo = pull_request.source_repo
847 version.source_ref = pull_request.source_ref
848 version.source_ref = pull_request.source_ref
848 version.target_repo = pull_request.target_repo
849 version.target_repo = pull_request.target_repo
849 version.target_ref = pull_request.target_ref
850 version.target_ref = pull_request.target_ref
850
851
851 version._last_merge_source_rev = pull_request._last_merge_source_rev
852 version._last_merge_source_rev = pull_request._last_merge_source_rev
852 version._last_merge_target_rev = pull_request._last_merge_target_rev
853 version._last_merge_target_rev = pull_request._last_merge_target_rev
853 version.last_merge_status = pull_request.last_merge_status
854 version.last_merge_status = pull_request.last_merge_status
854 version.shadow_merge_ref = pull_request.shadow_merge_ref
855 version.shadow_merge_ref = pull_request.shadow_merge_ref
855 version.merge_rev = pull_request.merge_rev
856 version.merge_rev = pull_request.merge_rev
856 version.reviewer_data = pull_request.reviewer_data
857 version.reviewer_data = pull_request.reviewer_data
857
858
858 version.revisions = pull_request.revisions
859 version.revisions = pull_request.revisions
859 version.pull_request = pull_request
860 version.pull_request = pull_request
860 Session().add(version)
861 Session().add(version)
861 Session().flush()
862 Session().flush()
862
863
863 return version
864 return version
864
865
865 def _generate_update_diffs(self, pull_request, pull_request_version):
866 def _generate_update_diffs(self, pull_request, pull_request_version):
866
867
867 diff_context = (
868 diff_context = (
868 self.DIFF_CONTEXT +
869 self.DIFF_CONTEXT +
869 CommentsModel.needed_extra_diff_context())
870 CommentsModel.needed_extra_diff_context())
870
871
871 source_repo = pull_request_version.source_repo
872 source_repo = pull_request_version.source_repo
872 source_ref_id = pull_request_version.source_ref_parts.commit_id
873 source_ref_id = pull_request_version.source_ref_parts.commit_id
873 target_ref_id = pull_request_version.target_ref_parts.commit_id
874 target_ref_id = pull_request_version.target_ref_parts.commit_id
874 old_diff = self._get_diff_from_pr_or_version(
875 old_diff = self._get_diff_from_pr_or_version(
875 source_repo, source_ref_id, target_ref_id, context=diff_context)
876 source_repo, source_ref_id, target_ref_id, context=diff_context)
876
877
877 source_repo = pull_request.source_repo
878 source_repo = pull_request.source_repo
878 source_ref_id = pull_request.source_ref_parts.commit_id
879 source_ref_id = pull_request.source_ref_parts.commit_id
879 target_ref_id = pull_request.target_ref_parts.commit_id
880 target_ref_id = pull_request.target_ref_parts.commit_id
880
881
881 new_diff = self._get_diff_from_pr_or_version(
882 new_diff = self._get_diff_from_pr_or_version(
882 source_repo, source_ref_id, target_ref_id, context=diff_context)
883 source_repo, source_ref_id, target_ref_id, context=diff_context)
883
884
884 old_diff_data = diffs.DiffProcessor(old_diff)
885 old_diff_data = diffs.DiffProcessor(old_diff)
885 old_diff_data.prepare()
886 old_diff_data.prepare()
886 new_diff_data = diffs.DiffProcessor(new_diff)
887 new_diff_data = diffs.DiffProcessor(new_diff)
887 new_diff_data.prepare()
888 new_diff_data.prepare()
888
889
889 return old_diff_data, new_diff_data
890 return old_diff_data, new_diff_data
890
891
891 def _link_comments_to_version(self, pull_request_version):
892 def _link_comments_to_version(self, pull_request_version):
892 """
893 """
893 Link all unlinked comments of this pull request to the given version.
894 Link all unlinked comments of this pull request to the given version.
894
895
895 :param pull_request_version: The `PullRequestVersion` to which
896 :param pull_request_version: The `PullRequestVersion` to which
896 the comments shall be linked.
897 the comments shall be linked.
897
898
898 """
899 """
899 pull_request = pull_request_version.pull_request
900 pull_request = pull_request_version.pull_request
900 comments = ChangesetComment.query()\
901 comments = ChangesetComment.query()\
901 .filter(
902 .filter(
902 # TODO: johbo: Should we query for the repo at all here?
903 # TODO: johbo: Should we query for the repo at all here?
903 # Pending decision on how comments of PRs are to be related
904 # Pending decision on how comments of PRs are to be related
904 # to either the source repo, the target repo or no repo at all.
905 # to either the source repo, the target repo or no repo at all.
905 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
906 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
906 ChangesetComment.pull_request == pull_request,
907 ChangesetComment.pull_request == pull_request,
907 ChangesetComment.pull_request_version == None)\
908 ChangesetComment.pull_request_version == None)\
908 .order_by(ChangesetComment.comment_id.asc())
909 .order_by(ChangesetComment.comment_id.asc())
909
910
910 # TODO: johbo: Find out why this breaks if it is done in a bulk
911 # TODO: johbo: Find out why this breaks if it is done in a bulk
911 # operation.
912 # operation.
912 for comment in comments:
913 for comment in comments:
913 comment.pull_request_version_id = (
914 comment.pull_request_version_id = (
914 pull_request_version.pull_request_version_id)
915 pull_request_version.pull_request_version_id)
915 Session().add(comment)
916 Session().add(comment)
916
917
917 def _calculate_commit_id_changes(self, old_ids, new_ids):
918 def _calculate_commit_id_changes(self, old_ids, new_ids):
918 added = [x for x in new_ids if x not in old_ids]
919 added = [x for x in new_ids if x not in old_ids]
919 common = [x for x in new_ids if x in old_ids]
920 common = [x for x in new_ids if x in old_ids]
920 removed = [x for x in old_ids if x not in new_ids]
921 removed = [x for x in old_ids if x not in new_ids]
921 total = new_ids
922 total = new_ids
922 return ChangeTuple(added, common, removed, total)
923 return ChangeTuple(added, common, removed, total)
923
924
924 def _calculate_file_changes(self, old_diff_data, new_diff_data):
925 def _calculate_file_changes(self, old_diff_data, new_diff_data):
925
926
926 old_files = OrderedDict()
927 old_files = OrderedDict()
927 for diff_data in old_diff_data.parsed_diff:
928 for diff_data in old_diff_data.parsed_diff:
928 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
929 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
929
930
930 added_files = []
931 added_files = []
931 modified_files = []
932 modified_files = []
932 removed_files = []
933 removed_files = []
933 for diff_data in new_diff_data.parsed_diff:
934 for diff_data in new_diff_data.parsed_diff:
934 new_filename = diff_data['filename']
935 new_filename = diff_data['filename']
935 new_hash = md5_safe(diff_data['raw_diff'])
936 new_hash = md5_safe(diff_data['raw_diff'])
936
937
937 old_hash = old_files.get(new_filename)
938 old_hash = old_files.get(new_filename)
938 if not old_hash:
939 if not old_hash:
939 # file is not present in old diff, means it's added
940 # file is not present in old diff, means it's added
940 added_files.append(new_filename)
941 added_files.append(new_filename)
941 else:
942 else:
942 if new_hash != old_hash:
943 if new_hash != old_hash:
943 modified_files.append(new_filename)
944 modified_files.append(new_filename)
944 # now remove a file from old, since we have seen it already
945 # now remove a file from old, since we have seen it already
945 del old_files[new_filename]
946 del old_files[new_filename]
946
947
947 # removed files is when there are present in old, but not in NEW,
948 # removed files is when there are present in old, but not in NEW,
948 # since we remove old files that are present in new diff, left-overs
949 # since we remove old files that are present in new diff, left-overs
949 # if any should be the removed files
950 # if any should be the removed files
950 removed_files.extend(old_files.keys())
951 removed_files.extend(old_files.keys())
951
952
952 return FileChangeTuple(added_files, modified_files, removed_files)
953 return FileChangeTuple(added_files, modified_files, removed_files)
953
954
954 def _render_update_message(self, changes, file_changes):
955 def _render_update_message(self, changes, file_changes):
955 """
956 """
956 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
957 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
957 so it's always looking the same disregarding on which default
958 so it's always looking the same disregarding on which default
958 renderer system is using.
959 renderer system is using.
959
960
960 :param changes: changes named tuple
961 :param changes: changes named tuple
961 :param file_changes: file changes named tuple
962 :param file_changes: file changes named tuple
962
963
963 """
964 """
964 new_status = ChangesetStatus.get_status_lbl(
965 new_status = ChangesetStatus.get_status_lbl(
965 ChangesetStatus.STATUS_UNDER_REVIEW)
966 ChangesetStatus.STATUS_UNDER_REVIEW)
966
967
967 changed_files = (
968 changed_files = (
968 file_changes.added + file_changes.modified + file_changes.removed)
969 file_changes.added + file_changes.modified + file_changes.removed)
969
970
970 params = {
971 params = {
971 'under_review_label': new_status,
972 'under_review_label': new_status,
972 'added_commits': changes.added,
973 'added_commits': changes.added,
973 'removed_commits': changes.removed,
974 'removed_commits': changes.removed,
974 'changed_files': changed_files,
975 'changed_files': changed_files,
975 'added_files': file_changes.added,
976 'added_files': file_changes.added,
976 'modified_files': file_changes.modified,
977 'modified_files': file_changes.modified,
977 'removed_files': file_changes.removed,
978 'removed_files': file_changes.removed,
978 }
979 }
979 renderer = RstTemplateRenderer()
980 renderer = RstTemplateRenderer()
980 return renderer.render('pull_request_update.mako', **params)
981 return renderer.render('pull_request_update.mako', **params)
981
982
982 def edit(self, pull_request, title, description, user):
983 def edit(self, pull_request, title, description, user):
983 pull_request = self.__get_pull_request(pull_request)
984 pull_request = self.__get_pull_request(pull_request)
984 old_data = pull_request.get_api_data(with_merge_state=False)
985 old_data = pull_request.get_api_data(with_merge_state=False)
985 if pull_request.is_closed():
986 if pull_request.is_closed():
986 raise ValueError('This pull request is closed')
987 raise ValueError('This pull request is closed')
987 if title:
988 if title:
988 pull_request.title = title
989 pull_request.title = title
989 pull_request.description = description
990 pull_request.description = description
990 pull_request.updated_on = datetime.datetime.now()
991 pull_request.updated_on = datetime.datetime.now()
991 Session().add(pull_request)
992 Session().add(pull_request)
992 self._log_audit_action(
993 self._log_audit_action(
993 'repo.pull_request.edit', {'old_data': old_data},
994 'repo.pull_request.edit', {'old_data': old_data},
994 user, pull_request)
995 user, pull_request)
995
996
996 def update_reviewers(self, pull_request, reviewer_data, user):
997 def update_reviewers(self, pull_request, reviewer_data, user):
997 """
998 """
998 Update the reviewers in the pull request
999 Update the reviewers in the pull request
999
1000
1000 :param pull_request: the pr to update
1001 :param pull_request: the pr to update
1001 :param reviewer_data: list of tuples
1002 :param reviewer_data: list of tuples
1002 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1003 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1003 """
1004 """
1004 pull_request = self.__get_pull_request(pull_request)
1005 pull_request = self.__get_pull_request(pull_request)
1005 if pull_request.is_closed():
1006 if pull_request.is_closed():
1006 raise ValueError('This pull request is closed')
1007 raise ValueError('This pull request is closed')
1007
1008
1008 reviewers = {}
1009 reviewers = {}
1009 for user_id, reasons, mandatory, rules in reviewer_data:
1010 for user_id, reasons, mandatory, rules in reviewer_data:
1010 if isinstance(user_id, (int, basestring)):
1011 if isinstance(user_id, (int, basestring)):
1011 user_id = self._get_user(user_id).user_id
1012 user_id = self._get_user(user_id).user_id
1012 reviewers[user_id] = {
1013 reviewers[user_id] = {
1013 'reasons': reasons, 'mandatory': mandatory}
1014 'reasons': reasons, 'mandatory': mandatory}
1014
1015
1015 reviewers_ids = set(reviewers.keys())
1016 reviewers_ids = set(reviewers.keys())
1016 current_reviewers = PullRequestReviewers.query()\
1017 current_reviewers = PullRequestReviewers.query()\
1017 .filter(PullRequestReviewers.pull_request ==
1018 .filter(PullRequestReviewers.pull_request ==
1018 pull_request).all()
1019 pull_request).all()
1019 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1020 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1020
1021
1021 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1022 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1022 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1023 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1023
1024
1024 log.debug("Adding %s reviewers", ids_to_add)
1025 log.debug("Adding %s reviewers", ids_to_add)
1025 log.debug("Removing %s reviewers", ids_to_remove)
1026 log.debug("Removing %s reviewers", ids_to_remove)
1026 changed = False
1027 changed = False
1027 for uid in ids_to_add:
1028 for uid in ids_to_add:
1028 changed = True
1029 changed = True
1029 _usr = self._get_user(uid)
1030 _usr = self._get_user(uid)
1030 reviewer = PullRequestReviewers()
1031 reviewer = PullRequestReviewers()
1031 reviewer.user = _usr
1032 reviewer.user = _usr
1032 reviewer.pull_request = pull_request
1033 reviewer.pull_request = pull_request
1033 reviewer.reasons = reviewers[uid]['reasons']
1034 reviewer.reasons = reviewers[uid]['reasons']
1034 # NOTE(marcink): mandatory shouldn't be changed now
1035 # NOTE(marcink): mandatory shouldn't be changed now
1035 # reviewer.mandatory = reviewers[uid]['reasons']
1036 # reviewer.mandatory = reviewers[uid]['reasons']
1036 Session().add(reviewer)
1037 Session().add(reviewer)
1037 self._log_audit_action(
1038 self._log_audit_action(
1038 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1039 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1039 user, pull_request)
1040 user, pull_request)
1040
1041
1041 for uid in ids_to_remove:
1042 for uid in ids_to_remove:
1042 changed = True
1043 changed = True
1043 reviewers = PullRequestReviewers.query()\
1044 reviewers = PullRequestReviewers.query()\
1044 .filter(PullRequestReviewers.user_id == uid,
1045 .filter(PullRequestReviewers.user_id == uid,
1045 PullRequestReviewers.pull_request == pull_request)\
1046 PullRequestReviewers.pull_request == pull_request)\
1046 .all()
1047 .all()
1047 # use .all() in case we accidentally added the same person twice
1048 # use .all() in case we accidentally added the same person twice
1048 # this CAN happen due to the lack of DB checks
1049 # this CAN happen due to the lack of DB checks
1049 for obj in reviewers:
1050 for obj in reviewers:
1050 old_data = obj.get_dict()
1051 old_data = obj.get_dict()
1051 Session().delete(obj)
1052 Session().delete(obj)
1052 self._log_audit_action(
1053 self._log_audit_action(
1053 'repo.pull_request.reviewer.delete',
1054 'repo.pull_request.reviewer.delete',
1054 {'old_data': old_data}, user, pull_request)
1055 {'old_data': old_data}, user, pull_request)
1055
1056
1056 if changed:
1057 if changed:
1057 pull_request.updated_on = datetime.datetime.now()
1058 pull_request.updated_on = datetime.datetime.now()
1058 Session().add(pull_request)
1059 Session().add(pull_request)
1059
1060
1060 self.notify_reviewers(pull_request, ids_to_add)
1061 self.notify_reviewers(pull_request, ids_to_add)
1061 return ids_to_add, ids_to_remove
1062 return ids_to_add, ids_to_remove
1062
1063
1063 def get_url(self, pull_request, request=None, permalink=False):
1064 def get_url(self, pull_request, request=None, permalink=False):
1064 if not request:
1065 if not request:
1065 request = get_current_request()
1066 request = get_current_request()
1066
1067
1067 if permalink:
1068 if permalink:
1068 return request.route_url(
1069 return request.route_url(
1069 'pull_requests_global',
1070 'pull_requests_global',
1070 pull_request_id=pull_request.pull_request_id,)
1071 pull_request_id=pull_request.pull_request_id,)
1071 else:
1072 else:
1072 return request.route_url('pullrequest_show',
1073 return request.route_url('pullrequest_show',
1073 repo_name=safe_str(pull_request.target_repo.repo_name),
1074 repo_name=safe_str(pull_request.target_repo.repo_name),
1074 pull_request_id=pull_request.pull_request_id,)
1075 pull_request_id=pull_request.pull_request_id,)
1075
1076
1076 def get_shadow_clone_url(self, pull_request, request=None):
1077 def get_shadow_clone_url(self, pull_request, request=None):
1077 """
1078 """
1078 Returns qualified url pointing to the shadow repository. If this pull
1079 Returns qualified url pointing to the shadow repository. If this pull
1079 request is closed there is no shadow repository and ``None`` will be
1080 request is closed there is no shadow repository and ``None`` will be
1080 returned.
1081 returned.
1081 """
1082 """
1082 if pull_request.is_closed():
1083 if pull_request.is_closed():
1083 return None
1084 return None
1084 else:
1085 else:
1085 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1086 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1086 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1087 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1087
1088
1088 def notify_reviewers(self, pull_request, reviewers_ids):
1089 def notify_reviewers(self, pull_request, reviewers_ids):
1089 # notification to reviewers
1090 # notification to reviewers
1090 if not reviewers_ids:
1091 if not reviewers_ids:
1091 return
1092 return
1092
1093
1093 pull_request_obj = pull_request
1094 pull_request_obj = pull_request
1094 # get the current participants of this pull request
1095 # get the current participants of this pull request
1095 recipients = reviewers_ids
1096 recipients = reviewers_ids
1096 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1097 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1097
1098
1098 pr_source_repo = pull_request_obj.source_repo
1099 pr_source_repo = pull_request_obj.source_repo
1099 pr_target_repo = pull_request_obj.target_repo
1100 pr_target_repo = pull_request_obj.target_repo
1100
1101
1101 pr_url = h.route_url('pullrequest_show',
1102 pr_url = h.route_url('pullrequest_show',
1102 repo_name=pr_target_repo.repo_name,
1103 repo_name=pr_target_repo.repo_name,
1103 pull_request_id=pull_request_obj.pull_request_id,)
1104 pull_request_id=pull_request_obj.pull_request_id,)
1104
1105
1105 # set some variables for email notification
1106 # set some variables for email notification
1106 pr_target_repo_url = h.route_url(
1107 pr_target_repo_url = h.route_url(
1107 'repo_summary', repo_name=pr_target_repo.repo_name)
1108 'repo_summary', repo_name=pr_target_repo.repo_name)
1108
1109
1109 pr_source_repo_url = h.route_url(
1110 pr_source_repo_url = h.route_url(
1110 'repo_summary', repo_name=pr_source_repo.repo_name)
1111 'repo_summary', repo_name=pr_source_repo.repo_name)
1111
1112
1112 # pull request specifics
1113 # pull request specifics
1113 pull_request_commits = [
1114 pull_request_commits = [
1114 (x.raw_id, x.message)
1115 (x.raw_id, x.message)
1115 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1116 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1116
1117
1117 kwargs = {
1118 kwargs = {
1118 'user': pull_request.author,
1119 'user': pull_request.author,
1119 'pull_request': pull_request_obj,
1120 'pull_request': pull_request_obj,
1120 'pull_request_commits': pull_request_commits,
1121 'pull_request_commits': pull_request_commits,
1121
1122
1122 'pull_request_target_repo': pr_target_repo,
1123 'pull_request_target_repo': pr_target_repo,
1123 'pull_request_target_repo_url': pr_target_repo_url,
1124 'pull_request_target_repo_url': pr_target_repo_url,
1124
1125
1125 'pull_request_source_repo': pr_source_repo,
1126 'pull_request_source_repo': pr_source_repo,
1126 'pull_request_source_repo_url': pr_source_repo_url,
1127 'pull_request_source_repo_url': pr_source_repo_url,
1127
1128
1128 'pull_request_url': pr_url,
1129 'pull_request_url': pr_url,
1129 }
1130 }
1130
1131
1131 # pre-generate the subject for notification itself
1132 # pre-generate the subject for notification itself
1132 (subject,
1133 (subject,
1133 _h, _e, # we don't care about those
1134 _h, _e, # we don't care about those
1134 body_plaintext) = EmailNotificationModel().render_email(
1135 body_plaintext) = EmailNotificationModel().render_email(
1135 notification_type, **kwargs)
1136 notification_type, **kwargs)
1136
1137
1137 # create notification objects, and emails
1138 # create notification objects, and emails
1138 NotificationModel().create(
1139 NotificationModel().create(
1139 created_by=pull_request.author,
1140 created_by=pull_request.author,
1140 notification_subject=subject,
1141 notification_subject=subject,
1141 notification_body=body_plaintext,
1142 notification_body=body_plaintext,
1142 notification_type=notification_type,
1143 notification_type=notification_type,
1143 recipients=recipients,
1144 recipients=recipients,
1144 email_kwargs=kwargs,
1145 email_kwargs=kwargs,
1145 )
1146 )
1146
1147
1147 def delete(self, pull_request, user):
1148 def delete(self, pull_request, user):
1148 pull_request = self.__get_pull_request(pull_request)
1149 pull_request = self.__get_pull_request(pull_request)
1149 old_data = pull_request.get_api_data(with_merge_state=False)
1150 old_data = pull_request.get_api_data(with_merge_state=False)
1150 self._cleanup_merge_workspace(pull_request)
1151 self._cleanup_merge_workspace(pull_request)
1151 self._log_audit_action(
1152 self._log_audit_action(
1152 'repo.pull_request.delete', {'old_data': old_data},
1153 'repo.pull_request.delete', {'old_data': old_data},
1153 user, pull_request)
1154 user, pull_request)
1154 Session().delete(pull_request)
1155 Session().delete(pull_request)
1155
1156
1156 def close_pull_request(self, pull_request, user):
1157 def close_pull_request(self, pull_request, user):
1157 pull_request = self.__get_pull_request(pull_request)
1158 pull_request = self.__get_pull_request(pull_request)
1158 self._cleanup_merge_workspace(pull_request)
1159 self._cleanup_merge_workspace(pull_request)
1159 pull_request.status = PullRequest.STATUS_CLOSED
1160 pull_request.status = PullRequest.STATUS_CLOSED
1160 pull_request.updated_on = datetime.datetime.now()
1161 pull_request.updated_on = datetime.datetime.now()
1161 Session().add(pull_request)
1162 Session().add(pull_request)
1162 self._trigger_pull_request_hook(
1163 self._trigger_pull_request_hook(
1163 pull_request, pull_request.author, 'close')
1164 pull_request, pull_request.author, 'close')
1164
1165
1165 pr_data = pull_request.get_api_data(with_merge_state=False)
1166 pr_data = pull_request.get_api_data(with_merge_state=False)
1166 self._log_audit_action(
1167 self._log_audit_action(
1167 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1168 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1168
1169
def close_pull_request_with_comment(
        self, pull_request, user, repo, message=None):
    """
    Close ``pull_request`` leaving a comment that carries a status change.

    The close is recorded as APPROVED only when the calculated review
    status already reached approval, otherwise as REJECTED.

    :param pull_request: pull request instance to close
    :param user: user performing the close
    :param repo: target repository the comment is attached to
    :param message: optional comment text; a default is generated if empty
    :return: tuple of ``(comment, status)``
    """
    review_status = pull_request.calculated_review_status()

    # Closing with approval only when the voting reached consent.
    if review_status == ChangesetStatus.STATUS_APPROVED:
        new_status = ChangesetStatus.STATUS_APPROVED
    else:
        new_status = ChangesetStatus.STATUS_REJECTED
    new_status_lbl = ChangesetStatus.get_status_lbl(new_status)

    fallback_message = (
        'Closing with status change {transition_icon} {status}.'
    ).format(transition_icon='>', status=new_status_lbl)
    comment_text = message or fallback_message

    # Create the closing comment, linked to the new status.
    comment = CommentsModel().create(
        text=comment_text,
        repo=repo.repo_id,
        user=user.user_id,
        pull_request=pull_request.pull_request_id,
        status_change=new_status_lbl,
        status_change_type=new_status,
        closing_pr=True
    )

    # Remember the status before the vote below may change it.
    old_calculated_status = pull_request.calculated_review_status()
    ChangesetStatusModel().set_status(
        repo.repo_id,
        new_status,
        user.user_id,
        comment=comment,
        pull_request=pull_request.pull_request_id
    )

    Session().flush()
    events.trigger(events.PullRequestCommentEvent(pull_request, comment))

    # Re-calculate the status: a closing vote from an actual reviewer
    # can flip it, while a non-reviewer admin closing leaves it as-is.
    calculated_status = pull_request.calculated_review_status()
    if old_calculated_status != calculated_status:
        self._trigger_pull_request_hook(
            pull_request, user, 'review_status_change')

    # Finally flag the pull request itself as closed.
    PullRequestModel().close_pull_request(
        pull_request.pull_request_id, user)

    return comment, new_status
1223
1224
def merge_status(self, pull_request, translator=None,
                 force_shadow_repo_refresh=False):
    """
    Check whether ``pull_request`` can currently be merged server-side.

    :param translator: optional translation callable; falls back to the
        current request's translator
    :param force_shadow_repo_refresh: when True, force a refresh of the
        shadow repository used for the merge simulation
    :return: tuple of ``(possible, message)``
    """
    _ = translator or get_current_request().translate

    # Cheap pre-conditions first: feature switch and PR state.
    if not self._is_merge_enabled(pull_request):
        return False, _('Server-side pull request merging is disabled.')
    if pull_request.is_closed():
        return False, _('This pull request is closed.')

    possible, msg = self._check_repo_requirements(
        target=pull_request.target_repo, source=pull_request.source_repo,
        translator=_)
    if not possible:
        return possible, msg

    # Run the (possibly cached) merge simulation for the final verdict.
    try:
        response = self._try_merge(
            pull_request,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        log.debug("Merge response: %s", response)
        return response.possible, self.merge_status_message(
            response.failure_reason)
    except NotImplementedError:
        return False, _('Pull request merging is not supported.')
1249
1250
1250 def _check_repo_requirements(self, target, source, translator):
1251 def _check_repo_requirements(self, target, source, translator):
1251 """
1252 """
1252 Check if `target` and `source` have compatible requirements.
1253 Check if `target` and `source` have compatible requirements.
1253
1254
1254 Currently this is just checking for largefiles.
1255 Currently this is just checking for largefiles.
1255 """
1256 """
1256 _ = translator
1257 _ = translator
1257 target_has_largefiles = self._has_largefiles(target)
1258 target_has_largefiles = self._has_largefiles(target)
1258 source_has_largefiles = self._has_largefiles(source)
1259 source_has_largefiles = self._has_largefiles(source)
1259 merge_possible = True
1260 merge_possible = True
1260 message = u''
1261 message = u''
1261
1262
1262 if target_has_largefiles != source_has_largefiles:
1263 if target_has_largefiles != source_has_largefiles:
1263 merge_possible = False
1264 merge_possible = False
1264 if source_has_largefiles:
1265 if source_has_largefiles:
1265 message = _(
1266 message = _(
1266 'Target repository large files support is disabled.')
1267 'Target repository large files support is disabled.')
1267 else:
1268 else:
1268 message = _(
1269 message = _(
1269 'Source repository large files support is disabled.')
1270 'Source repository large files support is disabled.')
1270
1271
1271 return merge_possible, message
1272 return merge_possible, message
1272
1273
def _has_largefiles(self, repo):
    """Return truthy when the largefiles extension is active for ``repo``."""
    matching_ui = VcsSettingsModel(repo=repo).get_ui_settings(
        'extensions', 'largefiles')
    # empty result means the extension is not configured at all
    return matching_ui and matching_ui[0].active
1277
1278
def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
    """
    Try to merge the pull request and return the merge status.

    Answers from the cached merge state when it is still valid,
    otherwise runs a dry-run merge in the shadow repository.
    """
    log.debug(
        "Trying out if the pull request %s can be merged. Force_refresh=%s",
        pull_request.pull_request_id, force_shadow_repo_refresh)
    target_vcs = pull_request.target_repo.scm_instance()

    # Refresh the target reference; a vanished ref means merge impossible.
    try:
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)
    except CommitDoesNotExistError:
        return MergeResponse(
            False, False, None, MergeFailureReason.MISSING_TARGET_REF)

    target_locked = pull_request.target_repo.locked
    if target_locked and target_locked[0]:
        log.debug("The target repository is locked.")
        return MergeResponse(
            False, False, None, MergeFailureReason.TARGET_IS_LOCKED)

    if force_shadow_repo_refresh or self._needs_merge_state_refresh(
            pull_request, target_ref):
        log.debug("Refreshing the merge status of the repository.")
        return self._refresh_merge_state(
            pull_request, target_vcs, target_ref)

    # Cached state is still valid, answer from the last stored result.
    possible = pull_request.last_merge_status == MergeFailureReason.NONE
    return MergeResponse(
        possible, False, None, pull_request.last_merge_status)
1313
1314
def _refresh_reference(self, reference, vcs_repository):
    """Return ``reference`` re-resolved against ``vcs_repository``.

    Branches and bookmarks may move, so they are resolved by name;
    anything else is already pinned to its commit id.
    """
    if reference.type in ('branch', 'book'):
        lookup = reference.name
    else:
        lookup = reference.commit_id
    refreshed_commit = vcs_repository.get_commit(lookup)
    return Reference(
        reference.type, reference.name, refreshed_commit.raw_id)
1323
1324
1324 def _needs_merge_state_refresh(self, pull_request, target_reference):
1325 def _needs_merge_state_refresh(self, pull_request, target_reference):
1325 return not(
1326 return not(
1326 pull_request.revisions and
1327 pull_request.revisions and
1327 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1328 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1328 target_reference.commit_id == pull_request._last_merge_target_rev)
1329 target_reference.commit_id == pull_request._last_merge_target_rev)
1329
1330
def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
    """
    Run a dry-run merge in the shadow repository and persist the outcome
    on the pull request, unless the failure reason is UNKNOWN.
    """
    workspace_id = self._workspace_id(pull_request)
    repo_id = pull_request.target_repo.repo_id
    source_vcs = pull_request.source_repo.scm_instance()
    use_rebase = self._use_rebase_for_merging(pull_request)
    close_branch = self._close_branch_before_merging(pull_request)

    merge_state = target_vcs.merge(
        repo_id, workspace_id,
        target_reference, source_vcs, pull_request.source_ref_parts,
        dry_run=True, use_rebase=use_rebase,
        close_branch=close_branch)

    # Do not store the response if there was an unknown error.
    if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
        # cache the revisions this simulation was based on, so the next
        # call can answer without re-running the merge
        pull_request._last_merge_source_rev = \
            pull_request.source_ref_parts.commit_id
        pull_request._last_merge_target_rev = target_reference.commit_id
        pull_request.last_merge_status = merge_state.failure_reason
        pull_request.shadow_merge_ref = merge_state.merge_ref
        Session().add(pull_request)
        Session().commit()

    return merge_state
1353
1354
1354 def _workspace_id(self, pull_request):
1355 def _workspace_id(self, pull_request):
1355 workspace_id = 'pr-%s' % pull_request.pull_request_id
1356 workspace_id = 'pr-%s' % pull_request.pull_request_id
1356 return workspace_id
1357 return workspace_id
1357
1358
def merge_status_message(self, status_code):
    """
    Return a human friendly error message for the given merge status code.
    """
    messages = self.MERGE_STATUS_MESSAGES
    return messages[status_code]
1363
1364
def generate_repo_data(self, repo, commit_id=None, branch=None,
                       bookmark=None, translator=None):
    """
    Build a JSON-serializable description of ``repo`` for the
    pull-request creation UI: owner info, grouped refs and the
    select2 widget options.
    """
    from rhodecode.model.repo import RepoModel

    all_refs, selected_ref = self._get_repo_pullrequest_sources(
        repo.scm_instance(), commit_id=commit_id,
        branch=branch, bookmark=bookmark, translator=translator)

    # Shape the grouped refs into the select2 option-group format.
    refs_select2 = [
        {'text': group_name,
         'children': [{'id': ref[0], 'text': ref[1]} for ref in group_refs]}
        for group_refs, group_name in all_refs
    ]

    owner = repo.user
    return {
        'user': {
            'user_id': owner.user_id,
            'username': owner.username,
            'firstname': owner.first_name,
            'lastname': owner.last_name,
            'gravatar_link': h.gravatar_url(owner.email, 14),
        },
        'name': repo.repo_name,
        'link': RepoModel().get_url(repo),
        # only the first line of the description is shown
        'description': h.chop_at_smart(repo.description_safe, '\n'),
        'refs': {
            'all_refs': all_refs,
            'selected_ref': selected_ref,
            'select2_refs': refs_select2
        }
    }
1395
1396
def generate_pullrequest_title(self, source, source_ref, target):
    """Compose the default PR title, e.g. ``repo#ref to other-repo``."""
    return u'{source}#{at_ref} to {target}'.format(
        source=source, at_ref=source_ref, target=target)
1402
1403
1403 def _cleanup_merge_workspace(self, pull_request):
1404 def _cleanup_merge_workspace(self, pull_request):
1404 # Merging related cleanup
1405 # Merging related cleanup
1405 repo_id = pull_request.target_repo.repo_id
1406 repo_id = pull_request.target_repo.repo_id
1406 target_scm = pull_request.target_repo.scm_instance()
1407 target_scm = pull_request.target_repo.scm_instance()
1407 workspace_id = self._workspace_id(pull_request)
1408 workspace_id = self._workspace_id(pull_request)
1408
1409
1409 try:
1410 try:
1410 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1411 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1411 except NotImplementedError:
1412 except NotImplementedError:
1412 pass
1413 pass
1413
1414
def _get_repo_pullrequest_sources(
        self, repo, commit_id=None, branch=None, bookmark=None,
        translator=None):
    """
    Return a structure with repo's interesting commits, suitable for
    the selectors in pullrequest controller

    :param commit_id: a commit that must be in the list somehow
        and selected by default
    :param branch: a branch that must be in the list and selected
        by default - even if closed
    :param bookmark: a bookmark that must be in the list and selected
    """
    _ = translator or get_current_request().translate

    commit_id = safe_str(commit_id) if commit_id else None
    branch = safe_str(branch) if branch else None
    bookmark = safe_str(bookmark) if bookmark else None

    selected = None

    # order matters: first source that has commit_id in it will be selected
    sources = [
        ('book', repo.bookmarks.items(), _('Bookmarks'), bookmark),
        ('branch', repo.branches.items(), _('Branches'), branch),
    ]
    if commit_id:
        ref_commit = (h.short_id(commit_id), commit_id)
        sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
    sources.append(
        ('branch', repo.branches_closed.items(), _('Closed Branches'), branch))

    groups = []
    for group_key, ref_list, group_name, match in sources:
        group_refs = []
        for ref_name, ref_id in ref_list:
            ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
            group_refs.append((ref_key, ref_name))

            # the first ref matching either the commit id or the
            # requested name wins the selection
            if not selected and {commit_id, match} & {ref_id, ref_name}:
                selected = ref_key

        if group_refs:
            groups.append((group_refs, group_name))

    if not selected:
        ref = commit_id or branch or bookmark
        if ref:
            # an explicit ref was asked for but never matched
            raise CommitDoesNotExistError(
                'No commit refs could be found matching: %s' % ref)
        elif repo.DEFAULT_BRANCH_NAME in repo.branches:
            selected = 'branch:%s:%s' % (
                repo.DEFAULT_BRANCH_NAME,
                repo.branches[repo.DEFAULT_BRANCH_NAME]
            )
        elif repo.commit_ids:
            # make the user select in this case
            selected = None
        else:
            raise EmptyRepositoryError()
    return groups, selected
1478
1479
def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
    """Return the diff between two refs of ``source_repo``."""
    return self._get_diff_from_pr_or_version(
        source_repo, source_ref_id, target_ref_id, context=context)
1482
1483
def _get_diff_from_pr_or_version(
        self, source_repo, source_ref_id, target_ref_id, context):
    """
    Compute the diff between ``target_ref_id`` and ``source_ref_id``
    inside ``source_repo``, which may be a DB Repository or a bare
    vcs repository instance.
    """
    target_commit = source_repo.get_commit(
        commit_id=safe_str(target_ref_id))
    source_commit = source_repo.get_commit(
        commit_id=safe_str(source_ref_id))

    if isinstance(source_repo, Repository):
        vcs_repo = source_repo.scm_instance()
    else:
        vcs_repo = source_repo

    # TODO: johbo: In the context of an update, we cannot reach
    # the old commit anymore with our normal mechanisms. It needs
    # some sort of special support in the vcs layer to avoid this
    # workaround.
    if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
            vcs_repo.alias == 'git'):
        source_commit.raw_id = safe_str(source_ref_id)

    log.debug('calculating diff between '
              'source_ref:%s and target_ref:%s for repo `%s`',
              target_ref_id, source_ref_id,
              safe_unicode(vcs_repo.path))

    return vcs_repo.get_diff(
        commit1=target_commit, commit2=source_commit, context=context)
1510
1511
1511 def _is_merge_enabled(self, pull_request):
1512 def _is_merge_enabled(self, pull_request):
1512 return self._get_general_setting(
1513 return self._get_general_setting(
1513 pull_request, 'rhodecode_pr_merge_enabled')
1514 pull_request, 'rhodecode_pr_merge_enabled')
1514
1515
1515 def _use_rebase_for_merging(self, pull_request):
1516 def _use_rebase_for_merging(self, pull_request):
1516 repo_type = pull_request.target_repo.repo_type
1517 repo_type = pull_request.target_repo.repo_type
1517 if repo_type == 'hg':
1518 if repo_type == 'hg':
1518 return self._get_general_setting(
1519 return self._get_general_setting(
1519 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1520 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1520 elif repo_type == 'git':
1521 elif repo_type == 'git':
1521 return self._get_general_setting(
1522 return self._get_general_setting(
1522 pull_request, 'rhodecode_git_use_rebase_for_merging')
1523 pull_request, 'rhodecode_git_use_rebase_for_merging')
1523
1524
1524 return False
1525 return False
1525
1526
1526 def _close_branch_before_merging(self, pull_request):
1527 def _close_branch_before_merging(self, pull_request):
1527 repo_type = pull_request.target_repo.repo_type
1528 repo_type = pull_request.target_repo.repo_type
1528 if repo_type == 'hg':
1529 if repo_type == 'hg':
1529 return self._get_general_setting(
1530 return self._get_general_setting(
1530 pull_request, 'rhodecode_hg_close_branch_before_merging')
1531 pull_request, 'rhodecode_hg_close_branch_before_merging')
1531 elif repo_type == 'git':
1532 elif repo_type == 'git':
1532 return self._get_general_setting(
1533 return self._get_general_setting(
1533 pull_request, 'rhodecode_git_close_branch_before_merging')
1534 pull_request, 'rhodecode_git_close_branch_before_merging')
1534
1535
1535 return False
1536 return False
1536
1537
def _get_general_setting(self, pull_request, settings_key, default=False):
    """Read a general VCS setting of the pull request's target repo."""
    settings = VcsSettingsModel(
        repo=pull_request.target_repo).get_general_settings()
    return settings.get(settings_key, default)
1541
1542
def _log_audit_action(self, action, action_data, user, pull_request):
    """Store an audit-log entry scoped to the PR's target repository."""
    audit_logger.store(
        action=action,
        action_data=action_data,
        user=user,
        repo=pull_request.target_repo)
1548
1549
def get_reviewer_functions(self):
    """
    Fetches functions for validation and fetching default reviewers.
    If available we use the EE package, else we fallback to CE
    package functions
    """
    try:
        # EE overrides, when the rc_reviewers package is installed
        from rc_reviewers.utils import get_default_reviewers_data
        from rc_reviewers.utils import validate_default_reviewers
    except ImportError:
        # CE fallback implementations
        from rhodecode.apps.repository.utils import \
            get_default_reviewers_data
        from rhodecode.apps.repository.utils import \
            validate_default_reviewers

    return get_default_reviewers_data, validate_default_reviewers
1565
1566
1566
1567
1567 class MergeCheck(object):
1568 class MergeCheck(object):
1568 """
1569 """
1569 Perform Merge Checks and returns a check object which stores information
1570 Perform Merge Checks and returns a check object which stores information
1570 about merge errors, and merge conditions
1571 about merge errors, and merge conditions
1571 """
1572 """
1572 TODO_CHECK = 'todo'
1573 TODO_CHECK = 'todo'
1573 PERM_CHECK = 'perm'
1574 PERM_CHECK = 'perm'
1574 REVIEW_CHECK = 'review'
1575 REVIEW_CHECK = 'review'
1575 MERGE_CHECK = 'merge'
1576 MERGE_CHECK = 'merge'
1576
1577
1577 def __init__(self):
1578 def __init__(self):
1578 self.review_status = None
1579 self.review_status = None
1579 self.merge_possible = None
1580 self.merge_possible = None
1580 self.merge_msg = ''
1581 self.merge_msg = ''
1581 self.failed = None
1582 self.failed = None
1582 self.errors = []
1583 self.errors = []
1583 self.error_details = OrderedDict()
1584 self.error_details = OrderedDict()
1584
1585
def push_error(self, error_type, message, error_key, details):
    """Record a failed check and mark the whole merge check as failed.

    :param error_type: severity label, e.g. 'error' or 'warning'
    :param message: human readable message
    :param error_key: one of the *_CHECK class constants
    :param details: arbitrary payload describing the failure
    """
    self.failed = True
    self.errors.append([error_type, message])
    self.error_details[error_key] = {
        'details': details,
        'error_type': error_type,
        'message': message,
    }
1593
1594
@classmethod
def validate(cls, pull_request, user, translator, fail_early=False,
             force_shadow_repo_refresh=False):
    """
    Run all merge checks for ``pull_request`` on behalf of ``user`` and
    return a populated check object.

    Checks, in order: merge permission, review approval, unresolved
    TODOs, and the merge simulation itself.

    :param fail_early: stop and return after the first failing check
    :param force_shadow_repo_refresh: force a shadow repo refresh for
        the merge simulation
    :return: a ``MergeCheck`` instance describing all collected errors
    """
    _ = translator
    merge_check = cls()

    # permissions to merge
    user_allowed_to_merge = PullRequestModel().check_user_merge(
        pull_request, user)
    if not user_allowed_to_merge:
        # FIX: previous message was copy-pasted from the review check
        # ("approval is pending"), which was misleading in the logs.
        log.debug("MergeCheck: cannot merge, merge permission denied.")

        msg = _('User `{}` not allowed to perform merge.').format(user.username)
        merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
        if fail_early:
            return merge_check

    # review status, must be always present
    review_status = pull_request.calculated_review_status()
    merge_check.review_status = review_status

    status_approved = review_status == ChangesetStatus.STATUS_APPROVED
    if not status_approved:
        log.debug("MergeCheck: cannot merge, approval is pending.")

        msg = _('Pull request reviewer approval is pending.')

        merge_check.push_error(
            'warning', msg, cls.REVIEW_CHECK, review_status)

        if fail_early:
            return merge_check

    # left over TODOs
    todos = CommentsModel().get_unresolved_todos(pull_request)
    if todos:
        log.debug("MergeCheck: cannot merge, {} "
                  "unresolved todos left.".format(len(todos)))

        if len(todos) == 1:
            msg = _('Cannot merge, {} TODO still not resolved.').format(
                len(todos))
        else:
            msg = _('Cannot merge, {} TODOs still not resolved.').format(
                len(todos))

        merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

        if fail_early:
            return merge_check

    # merge possible, here is the filesystem simulation + shadow repo
    merge_status, msg = PullRequestModel().merge_status(
        pull_request, translator=translator,
        force_shadow_repo_refresh=force_shadow_repo_refresh)
    merge_check.merge_possible = merge_status
    merge_check.merge_msg = msg
    if not merge_status:
        log.debug(
            "MergeCheck: cannot merge, pull request merge not possible.")
        merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

        if fail_early:
            return merge_check

    log.debug('MergeCheck: is failed: %s', merge_check.failed)
    return merge_check
1661
1662
1662 @classmethod
1663 @classmethod
1663 def get_merge_conditions(cls, pull_request, translator):
1664 def get_merge_conditions(cls, pull_request, translator):
1664 _ = translator
1665 _ = translator
1665 merge_details = {}
1666 merge_details = {}
1666
1667
1667 model = PullRequestModel()
1668 model = PullRequestModel()
1668 use_rebase = model._use_rebase_for_merging(pull_request)
1669 use_rebase = model._use_rebase_for_merging(pull_request)
1669
1670
1670 if use_rebase:
1671 if use_rebase:
1671 merge_details['merge_strategy'] = dict(
1672 merge_details['merge_strategy'] = dict(
1672 details={},
1673 details={},
1673 message=_('Merge strategy: rebase')
1674 message=_('Merge strategy: rebase')
1674 )
1675 )
1675 else:
1676 else:
1676 merge_details['merge_strategy'] = dict(
1677 merge_details['merge_strategy'] = dict(
1677 details={},
1678 details={},
1678 message=_('Merge strategy: explicit merge commit')
1679 message=_('Merge strategy: explicit merge commit')
1679 )
1680 )
1680
1681
1681 close_branch = model._close_branch_before_merging(pull_request)
1682 close_branch = model._close_branch_before_merging(pull_request)
1682 if close_branch:
1683 if close_branch:
1683 repo_type = pull_request.target_repo.repo_type
1684 repo_type = pull_request.target_repo.repo_type
1684 if repo_type == 'hg':
1685 if repo_type == 'hg':
1685 close_msg = _('Source branch will be closed after merge.')
1686 close_msg = _('Source branch will be closed after merge.')
1686 elif repo_type == 'git':
1687 elif repo_type == 'git':
1687 close_msg = _('Source branch will be deleted after merge.')
1688 close_msg = _('Source branch will be deleted after merge.')
1688
1689
1689 merge_details['close_branch'] = dict(
1690 merge_details['close_branch'] = dict(
1690 details={},
1691 details={},
1691 message=close_msg
1692 message=close_msg
1692 )
1693 )
1693
1694
1694 return merge_details
1695 return merge_details
1695
1696
1696 ChangeTuple = collections.namedtuple(
1697 ChangeTuple = collections.namedtuple(
1697 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1698 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1698
1699
1699 FileChangeTuple = collections.namedtuple(
1700 FileChangeTuple = collections.namedtuple(
1700 'FileChangeTuple', ['added', 'modified', 'removed'])
1701 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,204 +1,205 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2018 RhodeCode GmbH
3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import pytest
22 import pytest
23
23
24 from rhodecode.tests import no_newline_id_generator
24 from rhodecode.tests import no_newline_id_generator
25 from rhodecode.config.middleware import (
25 from rhodecode.config.middleware import (
26 _sanitize_vcs_settings, _bool_setting, _string_setting, _list_setting,
26 _sanitize_vcs_settings, _bool_setting, _string_setting, _list_setting,
27 _int_setting)
27 _int_setting)
28
28
29
29
30 class TestHelperFunctions(object):
30 class TestHelperFunctions(object):
31 @pytest.mark.parametrize('raw, expected', [
31 @pytest.mark.parametrize('raw, expected', [
32 ('true', True), (u'true', True),
32 ('true', True), (u'true', True),
33 ('yes', True), (u'yes', True),
33 ('yes', True), (u'yes', True),
34 ('on', True), (u'on', True),
34 ('on', True), (u'on', True),
35 ('false', False), (u'false', False),
35 ('false', False), (u'false', False),
36 ('no', False), (u'no', False),
36 ('no', False), (u'no', False),
37 ('off', False), (u'off', False),
37 ('off', False), (u'off', False),
38 ('invalid-bool-value', False),
38 ('invalid-bool-value', False),
39 ('invalid-∫øø@-√å@¨€', False),
39 ('invalid-∫øø@-√å@¨€', False),
40 (u'invalid-∫øø@-√å@¨€', False),
40 (u'invalid-∫øø@-√å@¨€', False),
41 ])
41 ])
42 def test_bool_setting_helper(self, raw, expected):
42 def test_bool_setting_helper(self, raw, expected):
43 key = 'dummy-key'
43 key = 'dummy-key'
44 settings = {key: raw}
44 settings = {key: raw}
45 _bool_setting(settings, key, None)
45 _bool_setting(settings, key, None)
46 assert settings[key] is expected
46 assert settings[key] is expected
47
47
48 @pytest.mark.parametrize('raw, expected', [
48 @pytest.mark.parametrize('raw, expected', [
49 ('', ''),
49 ('', ''),
50 ('test-string', 'test-string'),
50 ('test-string', 'test-string'),
51 ('CaSe-TeSt', 'case-test'),
51 ('CaSe-TeSt', 'case-test'),
52 ('test-string-烩€', 'test-string-烩€'),
52 ('test-string-烩€', 'test-string-烩€'),
53 (u'test-string-烩€', u'test-string-烩€'),
53 (u'test-string-烩€', u'test-string-烩€'),
54 ])
54 ])
55 def test_string_setting_helper(self, raw, expected):
55 def test_string_setting_helper(self, raw, expected):
56 key = 'dummy-key'
56 key = 'dummy-key'
57 settings = {key: raw}
57 settings = {key: raw}
58 _string_setting(settings, key, None)
58 _string_setting(settings, key, None)
59 assert settings[key] == expected
59 assert settings[key] == expected
60
60
61 @pytest.mark.parametrize('raw, expected', [
61 @pytest.mark.parametrize('raw, expected', [
62 ('', []),
62 ('', []),
63 ('test', ['test']),
63 ('test', ['test']),
64 ('CaSe-TeSt', ['CaSe-TeSt']),
64 ('CaSe-TeSt', ['CaSe-TeSt']),
65 ('test-string-烩€', ['test-string-烩€']),
65 ('test-string-烩€', ['test-string-烩€']),
66 (u'test-string-烩€', [u'test-string-烩€']),
66 (u'test-string-烩€', [u'test-string-烩€']),
67 ('hg git svn', ['hg', 'git', 'svn']),
67 ('hg git svn', ['hg', 'git', 'svn']),
68 ('hg,git,svn', ['hg', 'git', 'svn']),
68 ('hg,git,svn', ['hg', 'git', 'svn']),
69 ('hg, git, svn', ['hg', 'git', 'svn']),
69 ('hg, git, svn', ['hg', 'git', 'svn']),
70 ('hg\ngit\nsvn', ['hg', 'git', 'svn']),
70 ('hg\ngit\nsvn', ['hg', 'git', 'svn']),
71 (' hg\n git\n svn ', ['hg', 'git', 'svn']),
71 (' hg\n git\n svn ', ['hg', 'git', 'svn']),
72 (', hg , git , svn , ', ['', 'hg', 'git', 'svn', '']),
72 (', hg , git , svn , ', ['', 'hg', 'git', 'svn', '']),
73 ('cheese,free node,other', ['cheese', 'free node', 'other']),
73 ('cheese,free node,other', ['cheese', 'free node', 'other']),
74 ], ids=no_newline_id_generator)
74 ], ids=no_newline_id_generator)
75 def test_list_setting_helper(self, raw, expected):
75 def test_list_setting_helper(self, raw, expected):
76 key = 'dummy-key'
76 key = 'dummy-key'
77 settings = {key: raw}
77 settings = {key: raw}
78 _list_setting(settings, key, None)
78 _list_setting(settings, key, None)
79 assert settings[key] == expected
79 assert settings[key] == expected
80
80
81 @pytest.mark.parametrize('raw, expected', [
81 @pytest.mark.parametrize('raw, expected', [
82 ('0', 0),
82 ('0', 0),
83 ('-0', 0),
83 ('-0', 0),
84 ('12345', 12345),
84 ('12345', 12345),
85 ('-12345', -12345),
85 ('-12345', -12345),
86 (u'-12345', -12345),
86 (u'-12345', -12345),
87 ])
87 ])
88 def test_int_setting_helper(self, raw, expected):
88 def test_int_setting_helper(self, raw, expected):
89 key = 'dummy-key'
89 key = 'dummy-key'
90 settings = {key: raw}
90 settings = {key: raw}
91 _int_setting(settings, key, None)
91 _int_setting(settings, key, None)
92 assert settings[key] == expected
92 assert settings[key] == expected
93
93
94 @pytest.mark.parametrize('raw', [
94 @pytest.mark.parametrize('raw', [
95 ('0xff'),
95 ('0xff'),
96 (''),
96 (''),
97 ('invalid-int'),
97 ('invalid-int'),
98 ('invalid-⁄~†'),
98 ('invalid-⁄~†'),
99 (u'invalid-⁄~†'),
99 (u'invalid-⁄~†'),
100 ])
100 ])
101 def test_int_setting_helper_invalid_input(self, raw):
101 def test_int_setting_helper_invalid_input(self, raw):
102 key = 'dummy-key'
102 key = 'dummy-key'
103 settings = {key: raw}
103 settings = {key: raw}
104 with pytest.raises(Exception):
104 with pytest.raises(Exception):
105 _int_setting(settings, key, None)
105 _int_setting(settings, key, None)
106
106
107
107
108 class TestSanitizeVcsSettings(object):
108 class TestSanitizeVcsSettings(object):
109 _bool_settings = [
109 _bool_settings = [
110 ('vcs.hooks.direct_calls', False),
110 ('vcs.hooks.direct_calls', False),
111 ('vcs.server.enable', True),
111 ('vcs.server.enable', True),
112 ('vcs.start_server', False),
112 ('vcs.start_server', False),
113 ('startup.import_repos', False),
113 ('startup.import_repos', False),
114 ]
114 ]
115
115
116 _string_settings = [
116 _string_settings = [
117 ('vcs.svn.compatible_version', ''),
117 ('vcs.svn.compatible_version', ''),
118 ('git_rev_filter', '--all'),
118 ('git_rev_filter', '--all'),
119 ('vcs.hooks.protocol', 'http'),
119 ('vcs.hooks.protocol', 'http'),
120 ('vcs.hooks.host', '127.0.0.1'),
120 ('vcs.scm_app_implementation', 'http'),
121 ('vcs.scm_app_implementation', 'http'),
121 ('vcs.server', ''),
122 ('vcs.server', ''),
122 ('vcs.server.log_level', 'debug'),
123 ('vcs.server.log_level', 'debug'),
123 ('vcs.server.protocol', 'http'),
124 ('vcs.server.protocol', 'http'),
124 ]
125 ]
125
126
126 _list_settings = [
127 _list_settings = [
127 ('vcs.backends', 'hg git'),
128 ('vcs.backends', 'hg git'),
128 ]
129 ]
129
130
130 @pytest.mark.parametrize('key, default', _list_settings)
131 @pytest.mark.parametrize('key, default', _list_settings)
131 def test_list_setting_spacesep_list(self, key, default):
132 def test_list_setting_spacesep_list(self, key, default):
132 test_list = ['test', 'list', 'values', 'for', key]
133 test_list = ['test', 'list', 'values', 'for', key]
133 input_value = ' '.join(test_list)
134 input_value = ' '.join(test_list)
134 settings = {key: input_value}
135 settings = {key: input_value}
135 _sanitize_vcs_settings(settings)
136 _sanitize_vcs_settings(settings)
136 assert settings[key] == test_list
137 assert settings[key] == test_list
137
138
138 @pytest.mark.parametrize('key, default', _list_settings)
139 @pytest.mark.parametrize('key, default', _list_settings)
139 def test_list_setting_newlinesep_list(self, key, default):
140 def test_list_setting_newlinesep_list(self, key, default):
140 test_list = ['test', 'list', 'values', 'for', key]
141 test_list = ['test', 'list', 'values', 'for', key]
141 input_value = '\n'.join(test_list)
142 input_value = '\n'.join(test_list)
142 settings = {key: input_value}
143 settings = {key: input_value}
143 _sanitize_vcs_settings(settings)
144 _sanitize_vcs_settings(settings)
144 assert settings[key] == test_list
145 assert settings[key] == test_list
145
146
146 @pytest.mark.parametrize('key, default', _list_settings)
147 @pytest.mark.parametrize('key, default', _list_settings)
147 def test_list_setting_commasep_list(self, key, default):
148 def test_list_setting_commasep_list(self, key, default):
148 test_list = ['test', 'list', 'values', 'for', key]
149 test_list = ['test', 'list', 'values', 'for', key]
149 input_value = ','.join(test_list)
150 input_value = ','.join(test_list)
150 settings = {key: input_value}
151 settings = {key: input_value}
151 _sanitize_vcs_settings(settings)
152 _sanitize_vcs_settings(settings)
152 assert settings[key] == test_list
153 assert settings[key] == test_list
153
154
154 @pytest.mark.parametrize('key, default', _list_settings)
155 @pytest.mark.parametrize('key, default', _list_settings)
155 def test_list_setting_comma_and_space_sep_list(self, key, default):
156 def test_list_setting_comma_and_space_sep_list(self, key, default):
156 test_list = ['test', 'list', 'values', 'for', key]
157 test_list = ['test', 'list', 'values', 'for', key]
157 input_value = ', '.join(test_list)
158 input_value = ', '.join(test_list)
158 settings = {key: input_value}
159 settings = {key: input_value}
159 _sanitize_vcs_settings(settings)
160 _sanitize_vcs_settings(settings)
160 assert settings[key] == test_list
161 assert settings[key] == test_list
161
162
162 @pytest.mark.parametrize('key, default', _string_settings)
163 @pytest.mark.parametrize('key, default', _string_settings)
163 def test_string_setting_string(self, key, default):
164 def test_string_setting_string(self, key, default):
164 test_value = 'test-string-for-{}'.format(key)
165 test_value = 'test-string-for-{}'.format(key)
165 settings = {key: test_value}
166 settings = {key: test_value}
166 _sanitize_vcs_settings(settings)
167 _sanitize_vcs_settings(settings)
167 assert settings[key] == test_value
168 assert settings[key] == test_value
168
169
169 @pytest.mark.parametrize('key, default', _string_settings)
170 @pytest.mark.parametrize('key, default', _string_settings)
170 def test_string_setting_default(self, key, default):
171 def test_string_setting_default(self, key, default):
171 settings = {}
172 settings = {}
172 _sanitize_vcs_settings(settings)
173 _sanitize_vcs_settings(settings)
173 assert settings[key] == default
174 assert settings[key] == default
174
175
175 @pytest.mark.parametrize('key, default', _string_settings)
176 @pytest.mark.parametrize('key, default', _string_settings)
176 def test_string_setting_lowercase(self, key, default):
177 def test_string_setting_lowercase(self, key, default):
177 test_value = 'Test-String-For-{}'.format(key)
178 test_value = 'Test-String-For-{}'.format(key)
178 settings = {key: test_value}
179 settings = {key: test_value}
179 _sanitize_vcs_settings(settings)
180 _sanitize_vcs_settings(settings)
180 assert settings[key] == test_value.lower()
181 assert settings[key] == test_value.lower()
181
182
182 @pytest.mark.parametrize('key, default', _bool_settings)
183 @pytest.mark.parametrize('key, default', _bool_settings)
183 def test_bool_setting_true(self, key, default):
184 def test_bool_setting_true(self, key, default):
184 settings = {key: 'true'}
185 settings = {key: 'true'}
185 _sanitize_vcs_settings(settings)
186 _sanitize_vcs_settings(settings)
186 assert settings[key] is True
187 assert settings[key] is True
187
188
188 @pytest.mark.parametrize('key, default', _bool_settings)
189 @pytest.mark.parametrize('key, default', _bool_settings)
189 def test_bool_setting_false(self, key, default):
190 def test_bool_setting_false(self, key, default):
190 settings = {key: 'false'}
191 settings = {key: 'false'}
191 _sanitize_vcs_settings(settings)
192 _sanitize_vcs_settings(settings)
192 assert settings[key] is False
193 assert settings[key] is False
193
194
194 @pytest.mark.parametrize('key, default', _bool_settings)
195 @pytest.mark.parametrize('key, default', _bool_settings)
195 def test_bool_setting_invalid_string(self, key, default):
196 def test_bool_setting_invalid_string(self, key, default):
196 settings = {key: 'no-bool-val-string'}
197 settings = {key: 'no-bool-val-string'}
197 _sanitize_vcs_settings(settings)
198 _sanitize_vcs_settings(settings)
198 assert settings[key] is False
199 assert settings[key] is False
199
200
200 @pytest.mark.parametrize('key, default', _bool_settings)
201 @pytest.mark.parametrize('key, default', _bool_settings)
201 def test_bool_setting_default(self, key, default):
202 def test_bool_setting_default(self, key, default):
202 settings = {}
203 settings = {}
203 _sanitize_vcs_settings(settings)
204 _sanitize_vcs_settings(settings)
204 assert settings[key] is default
205 assert settings[key] is default
@@ -1,472 +1,473 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import base64
21 import base64
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.utils2 import AttributeDict
26 from rhodecode.lib.utils2 import AttributeDict
27 from rhodecode.tests.utils import CustomTestApp
27 from rhodecode.tests.utils import CustomTestApp
28
28
29 from rhodecode.lib.caching_query import FromCache
29 from rhodecode.lib.caching_query import FromCache
30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
31 from rhodecode.lib.middleware import simplevcs
31 from rhodecode.lib.middleware import simplevcs
32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
33 from rhodecode.lib.middleware.utils import scm_app_http
33 from rhodecode.lib.middleware.utils import scm_app_http
34 from rhodecode.model.db import User, _hash_key
34 from rhodecode.model.db import User, _hash_key
35 from rhodecode.model.meta import Session
35 from rhodecode.model.meta import Session
36 from rhodecode.tests import (
36 from rhodecode.tests import (
37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
38 from rhodecode.tests.lib.middleware import mock_scm_app
38 from rhodecode.tests.lib.middleware import mock_scm_app
39
39
40
40
41 class StubVCSController(simplevcs.SimpleVCS):
41 class StubVCSController(simplevcs.SimpleVCS):
42
42
43 SCM = 'hg'
43 SCM = 'hg'
44 stub_response_body = tuple()
44 stub_response_body = tuple()
45
45
46 def __init__(self, *args, **kwargs):
46 def __init__(self, *args, **kwargs):
47 super(StubVCSController, self).__init__(*args, **kwargs)
47 super(StubVCSController, self).__init__(*args, **kwargs)
48 self._action = 'pull'
48 self._action = 'pull'
49 self._is_shadow_repo_dir = True
49 self._is_shadow_repo_dir = True
50 self._name = HG_REPO
50 self._name = HG_REPO
51 self.set_repo_names(None)
51 self.set_repo_names(None)
52
52
53 @property
53 @property
54 def is_shadow_repo_dir(self):
54 def is_shadow_repo_dir(self):
55 return self._is_shadow_repo_dir
55 return self._is_shadow_repo_dir
56
56
57 def _get_repository_name(self, environ):
57 def _get_repository_name(self, environ):
58 return self._name
58 return self._name
59
59
60 def _get_action(self, environ):
60 def _get_action(self, environ):
61 return self._action
61 return self._action
62
62
63 def _create_wsgi_app(self, repo_path, repo_name, config):
63 def _create_wsgi_app(self, repo_path, repo_name, config):
64 def fake_app(environ, start_response):
64 def fake_app(environ, start_response):
65 headers = [
65 headers = [
66 ('Http-Accept', 'application/mercurial')
66 ('Http-Accept', 'application/mercurial')
67 ]
67 ]
68 start_response('200 OK', headers)
68 start_response('200 OK', headers)
69 return self.stub_response_body
69 return self.stub_response_body
70 return fake_app
70 return fake_app
71
71
72 def _create_config(self, extras, repo_name):
72 def _create_config(self, extras, repo_name):
73 return None
73 return None
74
74
75
75
76 @pytest.fixture
76 @pytest.fixture
77 def vcscontroller(baseapp, config_stub, request_stub):
77 def vcscontroller(baseapp, config_stub, request_stub):
78 config_stub.testing_securitypolicy()
78 config_stub.testing_securitypolicy()
79 config_stub.include('rhodecode.authentication')
79 config_stub.include('rhodecode.authentication')
80
80
81 controller = StubVCSController(
81 controller = StubVCSController(
82 baseapp.config.get_settings(), request_stub.registry)
82 baseapp.config.get_settings(), request_stub.registry)
83 app = HttpsFixup(controller, baseapp.config.get_settings())
83 app = HttpsFixup(controller, baseapp.config.get_settings())
84 app = CustomTestApp(app)
84 app = CustomTestApp(app)
85
85
86 _remove_default_user_from_query_cache()
86 _remove_default_user_from_query_cache()
87
87
88 # Sanity checks that things are set up correctly
88 # Sanity checks that things are set up correctly
89 app.get('/' + HG_REPO, status=200)
89 app.get('/' + HG_REPO, status=200)
90
90
91 app.controller = controller
91 app.controller = controller
92 return app
92 return app
93
93
94
94
95 def _remove_default_user_from_query_cache():
95 def _remove_default_user_from_query_cache():
96 user = User.get_default_user(cache=True)
96 user = User.get_default_user(cache=True)
97 query = Session().query(User).filter(User.username == user.username)
97 query = Session().query(User).filter(User.username == user.username)
98 query = query.options(
98 query = query.options(
99 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
99 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
100 query.invalidate()
100 query.invalidate()
101 Session().expire(user)
101 Session().expire(user)
102
102
103
103
104 def test_handles_exceptions_during_permissions_checks(
104 def test_handles_exceptions_during_permissions_checks(
105 vcscontroller, disable_anonymous_user):
105 vcscontroller, disable_anonymous_user):
106 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
106 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
107 auth_password = base64.encodestring(user_and_pass).strip()
107 auth_password = base64.encodestring(user_and_pass).strip()
108 extra_environ = {
108 extra_environ = {
109 'AUTH_TYPE': 'Basic',
109 'AUTH_TYPE': 'Basic',
110 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
110 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
111 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
111 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
112 }
112 }
113
113
114 # Verify that things are hooked up correctly
114 # Verify that things are hooked up correctly
115 vcscontroller.get('/', status=200, extra_environ=extra_environ)
115 vcscontroller.get('/', status=200, extra_environ=extra_environ)
116
116
117 # Simulate trouble during permission checks
117 # Simulate trouble during permission checks
118 with mock.patch('rhodecode.model.db.User.get_by_username',
118 with mock.patch('rhodecode.model.db.User.get_by_username',
119 side_effect=Exception) as get_user:
119 side_effect=Exception) as get_user:
120 # Verify that a correct 500 is returned and check that the expected
120 # Verify that a correct 500 is returned and check that the expected
121 # code path was hit.
121 # code path was hit.
122 vcscontroller.get('/', status=500, extra_environ=extra_environ)
122 vcscontroller.get('/', status=500, extra_environ=extra_environ)
123 assert get_user.called
123 assert get_user.called
124
124
125
125
126 def test_returns_forbidden_if_no_anonymous_access(
126 def test_returns_forbidden_if_no_anonymous_access(
127 vcscontroller, disable_anonymous_user):
127 vcscontroller, disable_anonymous_user):
128 vcscontroller.get('/', status=401)
128 vcscontroller.get('/', status=401)
129
129
130
130
131 class StubFailVCSController(simplevcs.SimpleVCS):
131 class StubFailVCSController(simplevcs.SimpleVCS):
132 def _handle_request(self, environ, start_response):
132 def _handle_request(self, environ, start_response):
133 raise Exception("BOOM")
133 raise Exception("BOOM")
134
134
135
135
136 @pytest.fixture(scope='module')
136 @pytest.fixture(scope='module')
137 def fail_controller(baseapp):
137 def fail_controller(baseapp):
138 controller = StubFailVCSController(
138 controller = StubFailVCSController(
139 baseapp.config.get_settings(), baseapp.config)
139 baseapp.config.get_settings(), baseapp.config)
140 controller = HttpsFixup(controller, baseapp.config.get_settings())
140 controller = HttpsFixup(controller, baseapp.config.get_settings())
141 controller = CustomTestApp(controller)
141 controller = CustomTestApp(controller)
142 return controller
142 return controller
143
143
144
144
145 def test_handles_exceptions_as_internal_server_error(fail_controller):
145 def test_handles_exceptions_as_internal_server_error(fail_controller):
146 fail_controller.get('/', status=500)
146 fail_controller.get('/', status=500)
147
147
148
148
149 def test_provides_traceback_for_appenlight(fail_controller):
149 def test_provides_traceback_for_appenlight(fail_controller):
150 response = fail_controller.get(
150 response = fail_controller.get(
151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
152 assert 'appenlight.__traceback' in response.request.environ
152 assert 'appenlight.__traceback' in response.request.environ
153
153
154
154
155 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
155 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
156 controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
156 controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
157 assert controller.scm_app is scm_app_http
157 assert controller.scm_app is scm_app_http
158
158
159
159
160 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
160 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
161 config = baseapp.config.get_settings().copy()
161 config = baseapp.config.get_settings().copy()
162 config['vcs.scm_app_implementation'] = (
162 config['vcs.scm_app_implementation'] = (
163 'rhodecode.tests.lib.middleware.mock_scm_app')
163 'rhodecode.tests.lib.middleware.mock_scm_app')
164 controller = StubVCSController(config, request_stub.registry)
164 controller = StubVCSController(config, request_stub.registry)
165 assert controller.scm_app is mock_scm_app
165 assert controller.scm_app is mock_scm_app
166
166
167
167
168 @pytest.mark.parametrize('query_string, expected', [
168 @pytest.mark.parametrize('query_string, expected', [
169 ('cmd=stub_command', True),
169 ('cmd=stub_command', True),
170 ('cmd=listkeys', False),
170 ('cmd=listkeys', False),
171 ])
171 ])
172 def test_should_check_locking(query_string, expected):
172 def test_should_check_locking(query_string, expected):
173 result = simplevcs._should_check_locking(query_string)
173 result = simplevcs._should_check_locking(query_string)
174 assert result == expected
174 assert result == expected
175
175
176
176
177 class TestShadowRepoRegularExpression(object):
177 class TestShadowRepoRegularExpression(object):
178 pr_segment = 'pull-request'
178 pr_segment = 'pull-request'
179 shadow_segment = 'repository'
179 shadow_segment = 'repository'
180
180
181 @pytest.mark.parametrize('url, expected', [
181 @pytest.mark.parametrize('url, expected', [
182 # repo with/without groups
182 # repo with/without groups
183 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
183 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
184 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
184 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
185 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
185 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
186 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
186 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
187
187
188 # pull request ID
188 # pull request ID
189 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
189 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
190 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
190 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
191 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
191 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
192 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
192 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
193
193
194 # unicode
194 # unicode
195 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
195 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
196 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
196 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
197
197
198 # trailing/leading slash
198 # trailing/leading slash
199 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
199 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
200 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
200 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
202
202
203 # misc
203 # misc
204 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
204 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
205 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
205 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
206 ])
206 ])
207 def test_shadow_repo_regular_expression(self, url, expected):
207 def test_shadow_repo_regular_expression(self, url, expected):
208 from rhodecode.lib.middleware.simplevcs import SimpleVCS
208 from rhodecode.lib.middleware.simplevcs import SimpleVCS
209 url = url.format(
209 url = url.format(
210 pr_segment=self.pr_segment,
210 pr_segment=self.pr_segment,
211 shadow_segment=self.shadow_segment)
211 shadow_segment=self.shadow_segment)
212 match_obj = SimpleVCS.shadow_repo_re.match(url)
212 match_obj = SimpleVCS.shadow_repo_re.match(url)
213 assert (match_obj is not None) == expected
213 assert (match_obj is not None) == expected
214
214
215
215
216 @pytest.mark.backends('git', 'hg')
216 @pytest.mark.backends('git', 'hg')
217 class TestShadowRepoExposure(object):
217 class TestShadowRepoExposure(object):
218
218
219 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
219 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
220 self, baseapp, request_stub):
220 self, baseapp, request_stub):
221 """
221 """
222 Check that a pull action to a shadow repo is propagated to the
222 Check that a pull action to a shadow repo is propagated to the
223 underlying wsgi app.
223 underlying wsgi app.
224 """
224 """
225 controller = StubVCSController(
225 controller = StubVCSController(
226 baseapp.config.get_settings(), request_stub.registry)
226 baseapp.config.get_settings(), request_stub.registry)
227 controller._check_ssl = mock.Mock()
227 controller._check_ssl = mock.Mock()
228 controller.is_shadow_repo = True
228 controller.is_shadow_repo = True
229 controller._action = 'pull'
229 controller._action = 'pull'
230 controller._is_shadow_repo_dir = True
230 controller._is_shadow_repo_dir = True
231 controller.stub_response_body = 'dummy body value'
231 controller.stub_response_body = 'dummy body value'
232 controller._get_default_cache_ttl = mock.Mock(
232 controller._get_default_cache_ttl = mock.Mock(
233 return_value=(False, 0))
233 return_value=(False, 0))
234
234
235 environ_stub = {
235 environ_stub = {
236 'HTTP_HOST': 'test.example.com',
236 'HTTP_HOST': 'test.example.com',
237 'HTTP_ACCEPT': 'application/mercurial',
237 'HTTP_ACCEPT': 'application/mercurial',
238 'REQUEST_METHOD': 'GET',
238 'REQUEST_METHOD': 'GET',
239 'wsgi.url_scheme': 'http',
239 'wsgi.url_scheme': 'http',
240 }
240 }
241
241
242 response = controller(environ_stub, mock.Mock())
242 response = controller(environ_stub, mock.Mock())
243 response_body = ''.join(response)
243 response_body = ''.join(response)
244
244
245 # Assert that we got the response from the wsgi app.
245 # Assert that we got the response from the wsgi app.
246 assert response_body == controller.stub_response_body
246 assert response_body == controller.stub_response_body
247
247
248 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
248 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
249 """
249 """
250 Check that a pull action to a shadow repo is propagated to the
250 Check that a pull action to a shadow repo is propagated to the
251 underlying wsgi app.
251 underlying wsgi app.
252 """
252 """
253 controller = StubVCSController(
253 controller = StubVCSController(
254 baseapp.config.get_settings(), request_stub.registry)
254 baseapp.config.get_settings(), request_stub.registry)
255 controller._check_ssl = mock.Mock()
255 controller._check_ssl = mock.Mock()
256 controller.is_shadow_repo = True
256 controller.is_shadow_repo = True
257 controller._action = 'pull'
257 controller._action = 'pull'
258 controller._is_shadow_repo_dir = False
258 controller._is_shadow_repo_dir = False
259 controller.stub_response_body = 'dummy body value'
259 controller.stub_response_body = 'dummy body value'
260 environ_stub = {
260 environ_stub = {
261 'HTTP_HOST': 'test.example.com',
261 'HTTP_HOST': 'test.example.com',
262 'HTTP_ACCEPT': 'application/mercurial',
262 'HTTP_ACCEPT': 'application/mercurial',
263 'REQUEST_METHOD': 'GET',
263 'REQUEST_METHOD': 'GET',
264 'wsgi.url_scheme': 'http',
264 'wsgi.url_scheme': 'http',
265 }
265 }
266
266
267 response = controller(environ_stub, mock.Mock())
267 response = controller(environ_stub, mock.Mock())
268 response_body = ''.join(response)
268 response_body = ''.join(response)
269
269
270 # Assert that we got the response from the wsgi app.
270 # Assert that we got the response from the wsgi app.
271 assert '404 Not Found' in response_body
271 assert '404 Not Found' in response_body
272
272
273 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
273 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
274 """
274 """
275 Check that a push action to a shadow repo is aborted.
275 Check that a push action to a shadow repo is aborted.
276 """
276 """
277 controller = StubVCSController(
277 controller = StubVCSController(
278 baseapp.config.get_settings(), request_stub.registry)
278 baseapp.config.get_settings(), request_stub.registry)
279 controller._check_ssl = mock.Mock()
279 controller._check_ssl = mock.Mock()
280 controller.is_shadow_repo = True
280 controller.is_shadow_repo = True
281 controller._action = 'push'
281 controller._action = 'push'
282 controller.stub_response_body = 'dummy body value'
282 controller.stub_response_body = 'dummy body value'
283 environ_stub = {
283 environ_stub = {
284 'HTTP_HOST': 'test.example.com',
284 'HTTP_HOST': 'test.example.com',
285 'HTTP_ACCEPT': 'application/mercurial',
285 'HTTP_ACCEPT': 'application/mercurial',
286 'REQUEST_METHOD': 'GET',
286 'REQUEST_METHOD': 'GET',
287 'wsgi.url_scheme': 'http',
287 'wsgi.url_scheme': 'http',
288 }
288 }
289
289
290 response = controller(environ_stub, mock.Mock())
290 response = controller(environ_stub, mock.Mock())
291 response_body = ''.join(response)
291 response_body = ''.join(response)
292
292
293 assert response_body != controller.stub_response_body
293 assert response_body != controller.stub_response_body
294 # Assert that a 406 error is returned.
294 # Assert that a 406 error is returned.
295 assert '406 Not Acceptable' in response_body
295 assert '406 Not Acceptable' in response_body
296
296
297 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
297 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
298 """
298 """
299 Check that the set_repo_names method sets all names to the one returned
299 Check that the set_repo_names method sets all names to the one returned
300 by the _get_repository_name method on a request to a non shadow repo.
300 by the _get_repository_name method on a request to a non shadow repo.
301 """
301 """
302 environ_stub = {}
302 environ_stub = {}
303 controller = StubVCSController(
303 controller = StubVCSController(
304 baseapp.config.get_settings(), request_stub.registry)
304 baseapp.config.get_settings(), request_stub.registry)
305 controller._name = 'RepoGroup/MyRepo'
305 controller._name = 'RepoGroup/MyRepo'
306 controller.set_repo_names(environ_stub)
306 controller.set_repo_names(environ_stub)
307 assert not controller.is_shadow_repo
307 assert not controller.is_shadow_repo
308 assert (controller.url_repo_name ==
308 assert (controller.url_repo_name ==
309 controller.acl_repo_name ==
309 controller.acl_repo_name ==
310 controller.vcs_repo_name ==
310 controller.vcs_repo_name ==
311 controller._get_repository_name(environ_stub))
311 controller._get_repository_name(environ_stub))
312
312
313 def test_set_repo_names_with_shadow(
313 def test_set_repo_names_with_shadow(
314 self, baseapp, pr_util, config_stub, request_stub):
314 self, baseapp, pr_util, config_stub, request_stub):
315 """
315 """
316 Check that the set_repo_names method sets correct names on a request
316 Check that the set_repo_names method sets correct names on a request
317 to a shadow repo.
317 to a shadow repo.
318 """
318 """
319 from rhodecode.model.pull_request import PullRequestModel
319 from rhodecode.model.pull_request import PullRequestModel
320
320
321 pull_request = pr_util.create_pull_request()
321 pull_request = pr_util.create_pull_request()
322 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
322 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
323 target=pull_request.target_repo.repo_name,
323 target=pull_request.target_repo.repo_name,
324 pr_id=pull_request.pull_request_id,
324 pr_id=pull_request.pull_request_id,
325 pr_segment=TestShadowRepoRegularExpression.pr_segment,
325 pr_segment=TestShadowRepoRegularExpression.pr_segment,
326 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
326 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
327 controller = StubVCSController(
327 controller = StubVCSController(
328 baseapp.config.get_settings(), request_stub.registry)
328 baseapp.config.get_settings(), request_stub.registry)
329 controller._name = shadow_url
329 controller._name = shadow_url
330 controller.set_repo_names({})
330 controller.set_repo_names({})
331
331
332 # Get file system path to shadow repo for assertions.
332 # Get file system path to shadow repo for assertions.
333 workspace_id = PullRequestModel()._workspace_id(pull_request)
333 workspace_id = PullRequestModel()._workspace_id(pull_request)
334 target_vcs = pull_request.target_repo.scm_instance()
334 target_vcs = pull_request.target_repo.scm_instance()
335 vcs_repo_name = target_vcs._get_shadow_repository_path(
335 vcs_repo_name = target_vcs._get_shadow_repository_path(
336 pull_request.target_repo.repo_id, workspace_id)
336 pull_request.target_repo.repo_id, workspace_id)
337
337
338 assert controller.vcs_repo_name == vcs_repo_name
338 assert controller.vcs_repo_name == vcs_repo_name
339 assert controller.url_repo_name == shadow_url
339 assert controller.url_repo_name == shadow_url
340 assert controller.acl_repo_name == pull_request.target_repo.repo_name
340 assert controller.acl_repo_name == pull_request.target_repo.repo_name
341 assert controller.is_shadow_repo
341 assert controller.is_shadow_repo
342
342
343 def test_set_repo_names_with_shadow_but_missing_pr(
343 def test_set_repo_names_with_shadow_but_missing_pr(
344 self, baseapp, pr_util, config_stub, request_stub):
344 self, baseapp, pr_util, config_stub, request_stub):
345 """
345 """
346 Checks that the set_repo_names method enforces matching target repos
346 Checks that the set_repo_names method enforces matching target repos
347 and pull request IDs.
347 and pull request IDs.
348 """
348 """
349 pull_request = pr_util.create_pull_request()
349 pull_request = pr_util.create_pull_request()
350 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
350 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
351 target=pull_request.target_repo.repo_name,
351 target=pull_request.target_repo.repo_name,
352 pr_id=999999999,
352 pr_id=999999999,
353 pr_segment=TestShadowRepoRegularExpression.pr_segment,
353 pr_segment=TestShadowRepoRegularExpression.pr_segment,
354 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
354 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
355 controller = StubVCSController(
355 controller = StubVCSController(
356 baseapp.config.get_settings(), request_stub.registry)
356 baseapp.config.get_settings(), request_stub.registry)
357 controller._name = shadow_url
357 controller._name = shadow_url
358 controller.set_repo_names({})
358 controller.set_repo_names({})
359
359
360 assert not controller.is_shadow_repo
360 assert not controller.is_shadow_repo
361 assert (controller.url_repo_name ==
361 assert (controller.url_repo_name ==
362 controller.acl_repo_name ==
362 controller.acl_repo_name ==
363 controller.vcs_repo_name)
363 controller.vcs_repo_name)
364
364
365
365
366 @pytest.mark.usefixtures('baseapp')
366 @pytest.mark.usefixtures('baseapp')
367 class TestGenerateVcsResponse(object):
367 class TestGenerateVcsResponse(object):
368
368
369 def test_ensures_that_start_response_is_called_early_enough(self):
369 def test_ensures_that_start_response_is_called_early_enough(self):
370 self.call_controller_with_response_body(iter(['a', 'b']))
370 self.call_controller_with_response_body(iter(['a', 'b']))
371 assert self.start_response.called
371 assert self.start_response.called
372
372
373 def test_invalidates_cache_after_body_is_consumed(self):
373 def test_invalidates_cache_after_body_is_consumed(self):
374 result = self.call_controller_with_response_body(iter(['a', 'b']))
374 result = self.call_controller_with_response_body(iter(['a', 'b']))
375 assert not self.was_cache_invalidated()
375 assert not self.was_cache_invalidated()
376 # Consume the result
376 # Consume the result
377 list(result)
377 list(result)
378 assert self.was_cache_invalidated()
378 assert self.was_cache_invalidated()
379
379
380 def test_raises_unknown_exceptions(self):
380 def test_raises_unknown_exceptions(self):
381 result = self.call_controller_with_response_body(
381 result = self.call_controller_with_response_body(
382 self.raise_result_iter(vcs_kind='unknown'))
382 self.raise_result_iter(vcs_kind='unknown'))
383 with pytest.raises(Exception):
383 with pytest.raises(Exception):
384 list(result)
384 list(result)
385
385
386 def test_prepare_callback_daemon_is_called(self):
386 def test_prepare_callback_daemon_is_called(self):
387 def side_effect(extras, environ, action, txn_id=None):
387 def side_effect(extras, environ, action, txn_id=None):
388 return DummyHooksCallbackDaemon(), extras
388 return DummyHooksCallbackDaemon(), extras
389
389
390 prepare_patcher = mock.patch.object(
390 prepare_patcher = mock.patch.object(
391 StubVCSController, '_prepare_callback_daemon')
391 StubVCSController, '_prepare_callback_daemon')
392 with prepare_patcher as prepare_mock:
392 with prepare_patcher as prepare_mock:
393 prepare_mock.side_effect = side_effect
393 prepare_mock.side_effect = side_effect
394 self.call_controller_with_response_body(iter(['a', 'b']))
394 self.call_controller_with_response_body(iter(['a', 'b']))
395 assert prepare_mock.called
395 assert prepare_mock.called
396 assert prepare_mock.call_count == 1
396 assert prepare_mock.call_count == 1
397
397
398 def call_controller_with_response_body(self, response_body):
398 def call_controller_with_response_body(self, response_body):
399 settings = {
399 settings = {
400 'base_path': 'fake_base_path',
400 'base_path': 'fake_base_path',
401 'vcs.hooks.protocol': 'http',
401 'vcs.hooks.protocol': 'http',
402 'vcs.hooks.direct_calls': False,
402 'vcs.hooks.direct_calls': False,
403 }
403 }
404 registry = AttributeDict()
404 registry = AttributeDict()
405 controller = StubVCSController(settings, registry)
405 controller = StubVCSController(settings, registry)
406 controller._invalidate_cache = mock.Mock()
406 controller._invalidate_cache = mock.Mock()
407 controller.stub_response_body = response_body
407 controller.stub_response_body = response_body
408 self.start_response = mock.Mock()
408 self.start_response = mock.Mock()
409 result = controller._generate_vcs_response(
409 result = controller._generate_vcs_response(
410 environ={}, start_response=self.start_response,
410 environ={}, start_response=self.start_response,
411 repo_path='fake_repo_path',
411 repo_path='fake_repo_path',
412 extras={}, action='push')
412 extras={}, action='push')
413 self.controller = controller
413 self.controller = controller
414 return result
414 return result
415
415
416 def raise_result_iter(self, vcs_kind='repo_locked'):
416 def raise_result_iter(self, vcs_kind='repo_locked'):
417 """
417 """
418 Simulates an exception due to a vcs raised exception if kind vcs_kind
418 Simulates an exception due to a vcs raised exception if kind vcs_kind
419 """
419 """
420 raise self.vcs_exception(vcs_kind=vcs_kind)
420 raise self.vcs_exception(vcs_kind=vcs_kind)
421 yield "never_reached"
421 yield "never_reached"
422
422
423 def vcs_exception(self, vcs_kind='repo_locked'):
423 def vcs_exception(self, vcs_kind='repo_locked'):
424 locked_exception = Exception('TEST_MESSAGE')
424 locked_exception = Exception('TEST_MESSAGE')
425 locked_exception._vcs_kind = vcs_kind
425 locked_exception._vcs_kind = vcs_kind
426 return locked_exception
426 return locked_exception
427
427
428 def was_cache_invalidated(self):
428 def was_cache_invalidated(self):
429 return self.controller._invalidate_cache.called
429 return self.controller._invalidate_cache.called
430
430
431
431
432 class TestInitializeGenerator(object):
432 class TestInitializeGenerator(object):
433
433
434 def test_drains_first_element(self):
434 def test_drains_first_element(self):
435 gen = self.factory(['__init__', 1, 2])
435 gen = self.factory(['__init__', 1, 2])
436 result = list(gen)
436 result = list(gen)
437 assert result == [1, 2]
437 assert result == [1, 2]
438
438
439 @pytest.mark.parametrize('values', [
439 @pytest.mark.parametrize('values', [
440 [],
440 [],
441 [1, 2],
441 [1, 2],
442 ])
442 ])
443 def test_raises_value_error(self, values):
443 def test_raises_value_error(self, values):
444 with pytest.raises(ValueError):
444 with pytest.raises(ValueError):
445 self.factory(values)
445 self.factory(values)
446
446
447 @simplevcs.initialize_generator
447 @simplevcs.initialize_generator
448 def factory(self, iterable):
448 def factory(self, iterable):
449 for elem in iterable:
449 for elem in iterable:
450 yield elem
450 yield elem
451
451
452
452
453 class TestPrepareHooksDaemon(object):
453 class TestPrepareHooksDaemon(object):
454 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
454 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
455 expected_extras = {'extra1': 'value1'}
455 expected_extras = {'extra1': 'value1'}
456 daemon = DummyHooksCallbackDaemon()
456 daemon = DummyHooksCallbackDaemon()
457
457
458 controller = StubVCSController(app_settings, request_stub.registry)
458 controller = StubVCSController(app_settings, request_stub.registry)
459 prepare_patcher = mock.patch.object(
459 prepare_patcher = mock.patch.object(
460 simplevcs, 'prepare_callback_daemon',
460 simplevcs, 'prepare_callback_daemon',
461 return_value=(daemon, expected_extras))
461 return_value=(daemon, expected_extras))
462 with prepare_patcher as prepare_mock:
462 with prepare_patcher as prepare_mock:
463 callback_daemon, extras = controller._prepare_callback_daemon(
463 callback_daemon, extras = controller._prepare_callback_daemon(
464 expected_extras.copy(), {}, 'push')
464 expected_extras.copy(), {}, 'push')
465 prepare_mock.assert_called_once_with(
465 prepare_mock.assert_called_once_with(
466 expected_extras,
466 expected_extras,
467 protocol=app_settings['vcs.hooks.protocol'],
467 protocol=app_settings['vcs.hooks.protocol'],
468 host=app_settings['vcs.hooks.host'],
468 txn_id=None,
469 txn_id=None,
469 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
470 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
470
471
471 assert callback_daemon == daemon
472 assert callback_daemon == daemon
472 assert extras == extras
473 assert extras == extras
@@ -1,329 +1,331 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import logging
22 import logging
23 from StringIO import StringIO
23 from StringIO import StringIO
24
24
25 import mock
25 import mock
26 import pytest
26 import pytest
27
27
28 from rhodecode.lib import hooks_daemon
28 from rhodecode.lib import hooks_daemon
29 from rhodecode.tests.utils import assert_message_in_log
29 from rhodecode.tests.utils import assert_message_in_log
30
30
31
31
32 class TestDummyHooksCallbackDaemon(object):
32 class TestDummyHooksCallbackDaemon(object):
33 def test_hooks_module_path_set_properly(self):
33 def test_hooks_module_path_set_properly(self):
34 daemon = hooks_daemon.DummyHooksCallbackDaemon()
34 daemon = hooks_daemon.DummyHooksCallbackDaemon()
35 assert daemon.hooks_module == 'rhodecode.lib.hooks_daemon'
35 assert daemon.hooks_module == 'rhodecode.lib.hooks_daemon'
36
36
37 def test_logs_entering_the_hook(self):
37 def test_logs_entering_the_hook(self):
38 daemon = hooks_daemon.DummyHooksCallbackDaemon()
38 daemon = hooks_daemon.DummyHooksCallbackDaemon()
39 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
39 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
40 with daemon as return_value:
40 with daemon as return_value:
41 log_mock.assert_called_once_with(
41 log_mock.assert_called_once_with(
42 'Running dummy hooks callback daemon')
42 'Running dummy hooks callback daemon')
43 assert return_value == daemon
43 assert return_value == daemon
44
44
45 def test_logs_exiting_the_hook(self):
45 def test_logs_exiting_the_hook(self):
46 daemon = hooks_daemon.DummyHooksCallbackDaemon()
46 daemon = hooks_daemon.DummyHooksCallbackDaemon()
47 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
47 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
48 with daemon:
48 with daemon:
49 pass
49 pass
50 log_mock.assert_called_with('Exiting dummy hooks callback daemon')
50 log_mock.assert_called_with('Exiting dummy hooks callback daemon')
51
51
52
52
53 class TestHooks(object):
53 class TestHooks(object):
54 def test_hooks_can_be_used_as_a_context_processor(self):
54 def test_hooks_can_be_used_as_a_context_processor(self):
55 hooks = hooks_daemon.Hooks()
55 hooks = hooks_daemon.Hooks()
56 with hooks as return_value:
56 with hooks as return_value:
57 pass
57 pass
58 assert hooks == return_value
58 assert hooks == return_value
59
59
60
60
61 class TestHooksHttpHandler(object):
61 class TestHooksHttpHandler(object):
62 def test_read_request_parses_method_name_and_arguments(self):
62 def test_read_request_parses_method_name_and_arguments(self):
63 data = {
63 data = {
64 'method': 'test',
64 'method': 'test',
65 'extras': {
65 'extras': {
66 'param1': 1,
66 'param1': 1,
67 'param2': 'a'
67 'param2': 'a'
68 }
68 }
69 }
69 }
70 request = self._generate_post_request(data)
70 request = self._generate_post_request(data)
71 hooks_patcher = mock.patch.object(
71 hooks_patcher = mock.patch.object(
72 hooks_daemon.Hooks, data['method'], create=True, return_value=1)
72 hooks_daemon.Hooks, data['method'], create=True, return_value=1)
73
73
74 with hooks_patcher as hooks_mock:
74 with hooks_patcher as hooks_mock:
75 MockServer(hooks_daemon.HooksHttpHandler, request)
75 MockServer(hooks_daemon.HooksHttpHandler, request)
76
76
77 hooks_mock.assert_called_once_with(data['extras'])
77 hooks_mock.assert_called_once_with(data['extras'])
78
78
79 def test_hooks_serialized_result_is_returned(self):
79 def test_hooks_serialized_result_is_returned(self):
80 request = self._generate_post_request({})
80 request = self._generate_post_request({})
81 rpc_method = 'test'
81 rpc_method = 'test'
82 hook_result = {
82 hook_result = {
83 'first': 'one',
83 'first': 'one',
84 'second': 2
84 'second': 2
85 }
85 }
86 read_patcher = mock.patch.object(
86 read_patcher = mock.patch.object(
87 hooks_daemon.HooksHttpHandler, '_read_request',
87 hooks_daemon.HooksHttpHandler, '_read_request',
88 return_value=(rpc_method, {}))
88 return_value=(rpc_method, {}))
89 hooks_patcher = mock.patch.object(
89 hooks_patcher = mock.patch.object(
90 hooks_daemon.Hooks, rpc_method, create=True,
90 hooks_daemon.Hooks, rpc_method, create=True,
91 return_value=hook_result)
91 return_value=hook_result)
92
92
93 with read_patcher, hooks_patcher:
93 with read_patcher, hooks_patcher:
94 server = MockServer(hooks_daemon.HooksHttpHandler, request)
94 server = MockServer(hooks_daemon.HooksHttpHandler, request)
95
95
96 expected_result = json.dumps(hook_result)
96 expected_result = json.dumps(hook_result)
97 assert server.request.output_stream.buflist[-1] == expected_result
97 assert server.request.output_stream.buflist[-1] == expected_result
98
98
99 def test_exception_is_returned_in_response(self):
99 def test_exception_is_returned_in_response(self):
100 request = self._generate_post_request({})
100 request = self._generate_post_request({})
101 rpc_method = 'test'
101 rpc_method = 'test'
102 read_patcher = mock.patch.object(
102 read_patcher = mock.patch.object(
103 hooks_daemon.HooksHttpHandler, '_read_request',
103 hooks_daemon.HooksHttpHandler, '_read_request',
104 return_value=(rpc_method, {}))
104 return_value=(rpc_method, {}))
105 hooks_patcher = mock.patch.object(
105 hooks_patcher = mock.patch.object(
106 hooks_daemon.Hooks, rpc_method, create=True,
106 hooks_daemon.Hooks, rpc_method, create=True,
107 side_effect=Exception('Test exception'))
107 side_effect=Exception('Test exception'))
108
108
109 with read_patcher, hooks_patcher:
109 with read_patcher, hooks_patcher:
110 server = MockServer(hooks_daemon.HooksHttpHandler, request)
110 server = MockServer(hooks_daemon.HooksHttpHandler, request)
111
111
112 org_exc = json.loads(server.request.output_stream.buflist[-1])
112 org_exc = json.loads(server.request.output_stream.buflist[-1])
113 expected_result = {
113 expected_result = {
114 'exception': 'Exception',
114 'exception': 'Exception',
115 'exception_traceback': org_exc['exception_traceback'],
115 'exception_traceback': org_exc['exception_traceback'],
116 'exception_args': ['Test exception']
116 'exception_args': ['Test exception']
117 }
117 }
118 assert org_exc == expected_result
118 assert org_exc == expected_result
119
119
120 def test_log_message_writes_to_debug_log(self, caplog):
120 def test_log_message_writes_to_debug_log(self, caplog):
121 ip_port = ('0.0.0.0', 8888)
121 ip_port = ('0.0.0.0', 8888)
122 handler = hooks_daemon.HooksHttpHandler(
122 handler = hooks_daemon.HooksHttpHandler(
123 MockRequest('POST /'), ip_port, mock.Mock())
123 MockRequest('POST /'), ip_port, mock.Mock())
124 fake_date = '1/Nov/2015 00:00:00'
124 fake_date = '1/Nov/2015 00:00:00'
125 date_patcher = mock.patch.object(
125 date_patcher = mock.patch.object(
126 handler, 'log_date_time_string', return_value=fake_date)
126 handler, 'log_date_time_string', return_value=fake_date)
127 with date_patcher, caplog.at_level(logging.DEBUG):
127 with date_patcher, caplog.at_level(logging.DEBUG):
128 handler.log_message('Some message %d, %s', 123, 'string')
128 handler.log_message('Some message %d, %s', 123, 'string')
129
129
130 expected_message = '{} - - [{}] Some message 123, string'.format(
130 expected_message = '{} - - [{}] Some message 123, string'.format(
131 ip_port[0], fake_date)
131 ip_port[0], fake_date)
132 assert_message_in_log(
132 assert_message_in_log(
133 caplog.records, expected_message,
133 caplog.records, expected_message,
134 levelno=logging.DEBUG, module='hooks_daemon')
134 levelno=logging.DEBUG, module='hooks_daemon')
135
135
136 def _generate_post_request(self, data):
136 def _generate_post_request(self, data):
137 payload = json.dumps(data)
137 payload = json.dumps(data)
138 return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format(
138 return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format(
139 len(payload), payload)
139 len(payload), payload)
140
140
141
141
142 class ThreadedHookCallbackDaemon(object):
142 class ThreadedHookCallbackDaemon(object):
143 def test_constructor_calls_prepare(self):
143 def test_constructor_calls_prepare(self):
144 prepare_daemon_patcher = mock.patch.object(
144 prepare_daemon_patcher = mock.patch.object(
145 hooks_daemon.ThreadedHookCallbackDaemon, '_prepare')
145 hooks_daemon.ThreadedHookCallbackDaemon, '_prepare')
146 with prepare_daemon_patcher as prepare_daemon_mock:
146 with prepare_daemon_patcher as prepare_daemon_mock:
147 hooks_daemon.ThreadedHookCallbackDaemon()
147 hooks_daemon.ThreadedHookCallbackDaemon()
148 prepare_daemon_mock.assert_called_once_with()
148 prepare_daemon_mock.assert_called_once_with()
149
149
150 def test_run_is_called_on_context_start(self):
150 def test_run_is_called_on_context_start(self):
151 patchers = mock.patch.multiple(
151 patchers = mock.patch.multiple(
152 hooks_daemon.ThreadedHookCallbackDaemon,
152 hooks_daemon.ThreadedHookCallbackDaemon,
153 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
153 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
154
154
155 with patchers as mocks:
155 with patchers as mocks:
156 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
156 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
157 with daemon as daemon_context:
157 with daemon as daemon_context:
158 pass
158 pass
159 mocks['_run'].assert_called_once_with()
159 mocks['_run'].assert_called_once_with()
160 assert daemon_context == daemon
160 assert daemon_context == daemon
161
161
162 def test_stop_is_called_on_context_exit(self):
162 def test_stop_is_called_on_context_exit(self):
163 patchers = mock.patch.multiple(
163 patchers = mock.patch.multiple(
164 hooks_daemon.ThreadedHookCallbackDaemon,
164 hooks_daemon.ThreadedHookCallbackDaemon,
165 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
165 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
166
166
167 with patchers as mocks:
167 with patchers as mocks:
168 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
168 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
169 with daemon as daemon_context:
169 with daemon as daemon_context:
170 assert mocks['_stop'].call_count == 0
170 assert mocks['_stop'].call_count == 0
171
171
172 mocks['_stop'].assert_called_once_with()
172 mocks['_stop'].assert_called_once_with()
173 assert daemon_context == daemon
173 assert daemon_context == daemon
174
174
175
175
176 class TestHttpHooksCallbackDaemon(object):
176 class TestHttpHooksCallbackDaemon(object):
177 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
177 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
178 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
178 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
179 daemon = hooks_daemon.HttpHooksCallbackDaemon()
179 daemon = hooks_daemon.HttpHooksCallbackDaemon()
180 assert daemon._daemon == tcp_server
180 assert daemon._daemon == tcp_server
181
181
182 _, port = tcp_server.server_address
182 _, port = tcp_server.server_address
183 expected_uri = '{}:{}'.format(daemon.IP_ADDRESS, port)
183 expected_uri = '{}:{}'.format('127.0.0.1', port)
184 msg = 'Preparing HTTP callback daemon at `{}` and ' \
184 msg = 'Preparing HTTP callback daemon at `{}` and ' \
185 'registering hook object'.format(expected_uri)
185 'registering hook object'.format(expected_uri)
186 assert_message_in_log(
186 assert_message_in_log(
187 caplog.records, msg, levelno=logging.DEBUG, module='hooks_daemon')
187 caplog.records, msg, levelno=logging.DEBUG, module='hooks_daemon')
188
188
189 def test_prepare_inits_hooks_uri_and_logs_it(
189 def test_prepare_inits_hooks_uri_and_logs_it(
190 self, tcp_server, caplog):
190 self, tcp_server, caplog):
191 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
191 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
192 daemon = hooks_daemon.HttpHooksCallbackDaemon()
192 daemon = hooks_daemon.HttpHooksCallbackDaemon()
193
193
194 _, port = tcp_server.server_address
194 _, port = tcp_server.server_address
195 expected_uri = '{}:{}'.format(daemon.IP_ADDRESS, port)
195 expected_uri = '{}:{}'.format('127.0.0.1', port)
196 assert daemon.hooks_uri == expected_uri
196 assert daemon.hooks_uri == expected_uri
197
197
198 msg = 'Preparing HTTP callback daemon at `{}` and ' \
198 msg = 'Preparing HTTP callback daemon at `{}` and ' \
199 'registering hook object'.format(expected_uri)
199 'registering hook object'.format(expected_uri)
200 assert_message_in_log(
200 assert_message_in_log(
201 caplog.records, msg,
201 caplog.records, msg,
202 levelno=logging.DEBUG, module='hooks_daemon')
202 levelno=logging.DEBUG, module='hooks_daemon')
203
203
204 def test_run_creates_a_thread(self, tcp_server):
204 def test_run_creates_a_thread(self, tcp_server):
205 thread = mock.Mock()
205 thread = mock.Mock()
206
206
207 with self._tcp_patcher(tcp_server):
207 with self._tcp_patcher(tcp_server):
208 daemon = hooks_daemon.HttpHooksCallbackDaemon()
208 daemon = hooks_daemon.HttpHooksCallbackDaemon()
209
209
210 with self._thread_patcher(thread) as thread_mock:
210 with self._thread_patcher(thread) as thread_mock:
211 daemon._run()
211 daemon._run()
212
212
213 thread_mock.assert_called_once_with(
213 thread_mock.assert_called_once_with(
214 target=tcp_server.serve_forever,
214 target=tcp_server.serve_forever,
215 kwargs={'poll_interval': daemon.POLL_INTERVAL})
215 kwargs={'poll_interval': daemon.POLL_INTERVAL})
216 assert thread.daemon is True
216 assert thread.daemon is True
217 thread.start.assert_called_once_with()
217 thread.start.assert_called_once_with()
218
218
219 def test_run_logs(self, tcp_server, caplog):
219 def test_run_logs(self, tcp_server, caplog):
220
220
221 with self._tcp_patcher(tcp_server):
221 with self._tcp_patcher(tcp_server):
222 daemon = hooks_daemon.HttpHooksCallbackDaemon()
222 daemon = hooks_daemon.HttpHooksCallbackDaemon()
223
223
224 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
224 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
225 daemon._run()
225 daemon._run()
226
226
227 assert_message_in_log(
227 assert_message_in_log(
228 caplog.records,
228 caplog.records,
229 'Running event loop of callback daemon in background thread',
229 'Running event loop of callback daemon in background thread',
230 levelno=logging.DEBUG, module='hooks_daemon')
230 levelno=logging.DEBUG, module='hooks_daemon')
231
231
232 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
232 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
233 thread = mock.Mock()
233 thread = mock.Mock()
234
234
235 with self._tcp_patcher(tcp_server):
235 with self._tcp_patcher(tcp_server):
236 daemon = hooks_daemon.HttpHooksCallbackDaemon()
236 daemon = hooks_daemon.HttpHooksCallbackDaemon()
237
237
238 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
238 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
239 with daemon:
239 with daemon:
240 assert daemon._daemon == tcp_server
240 assert daemon._daemon == tcp_server
241 assert daemon._callback_thread == thread
241 assert daemon._callback_thread == thread
242
242
243 assert daemon._daemon is None
243 assert daemon._daemon is None
244 assert daemon._callback_thread is None
244 assert daemon._callback_thread is None
245 tcp_server.shutdown.assert_called_with()
245 tcp_server.shutdown.assert_called_with()
246 thread.join.assert_called_once_with()
246 thread.join.assert_called_once_with()
247
247
248 assert_message_in_log(
248 assert_message_in_log(
249 caplog.records, 'Waiting for background thread to finish.',
249 caplog.records, 'Waiting for background thread to finish.',
250 levelno=logging.DEBUG, module='hooks_daemon')
250 levelno=logging.DEBUG, module='hooks_daemon')
251
251
252 def _tcp_patcher(self, tcp_server):
252 def _tcp_patcher(self, tcp_server):
253 return mock.patch.object(
253 return mock.patch.object(
254 hooks_daemon, 'TCPServer', return_value=tcp_server)
254 hooks_daemon, 'TCPServer', return_value=tcp_server)
255
255
256 def _thread_patcher(self, thread):
256 def _thread_patcher(self, thread):
257 return mock.patch.object(
257 return mock.patch.object(
258 hooks_daemon.threading, 'Thread', return_value=thread)
258 hooks_daemon.threading, 'Thread', return_value=thread)
259
259
260
260
261 class TestPrepareHooksDaemon(object):
261 class TestPrepareHooksDaemon(object):
262 @pytest.mark.parametrize('protocol', ('http',))
262 @pytest.mark.parametrize('protocol', ('http',))
263 def test_returns_dummy_hooks_callback_daemon_when_using_direct_calls(
263 def test_returns_dummy_hooks_callback_daemon_when_using_direct_calls(
264 self, protocol):
264 self, protocol):
265 expected_extras = {'extra1': 'value1'}
265 expected_extras = {'extra1': 'value1'}
266 callback, extras = hooks_daemon.prepare_callback_daemon(
266 callback, extras = hooks_daemon.prepare_callback_daemon(
267 expected_extras.copy(), protocol=protocol, use_direct_calls=True)
267 expected_extras.copy(), protocol=protocol,
268 host='127.0.0.1', use_direct_calls=True)
268 assert isinstance(callback, hooks_daemon.DummyHooksCallbackDaemon)
269 assert isinstance(callback, hooks_daemon.DummyHooksCallbackDaemon)
269 expected_extras['hooks_module'] = 'rhodecode.lib.hooks_daemon'
270 expected_extras['hooks_module'] = 'rhodecode.lib.hooks_daemon'
270 expected_extras['time'] = extras['time']
271 expected_extras['time'] = extras['time']
271 assert 'extra1' in extras
272 assert 'extra1' in extras
272
273
273 @pytest.mark.parametrize('protocol, expected_class', (
274 @pytest.mark.parametrize('protocol, expected_class', (
274 ('http', hooks_daemon.HttpHooksCallbackDaemon),
275 ('http', hooks_daemon.HttpHooksCallbackDaemon),
275 ))
276 ))
276 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
277 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
277 self, protocol, expected_class):
278 self, protocol, expected_class):
278 expected_extras = {
279 expected_extras = {
279 'extra1': 'value1',
280 'extra1': 'value1',
280 'txn_id': 'txnid2',
281 'txn_id': 'txnid2',
281 'hooks_protocol': protocol.lower()
282 'hooks_protocol': protocol.lower()
282 }
283 }
283 callback, extras = hooks_daemon.prepare_callback_daemon(
284 callback, extras = hooks_daemon.prepare_callback_daemon(
284 expected_extras.copy(), protocol=protocol, use_direct_calls=False,
285 expected_extras.copy(), protocol=protocol, host='127.0.0.1',
286 use_direct_calls=False,
285 txn_id='txnid2')
287 txn_id='txnid2')
286 assert isinstance(callback, expected_class)
288 assert isinstance(callback, expected_class)
287 extras.pop('hooks_uri')
289 extras.pop('hooks_uri')
288 expected_extras['time'] = extras['time']
290 expected_extras['time'] = extras['time']
289 assert extras == expected_extras
291 assert extras == expected_extras
290
292
291 @pytest.mark.parametrize('protocol', (
293 @pytest.mark.parametrize('protocol', (
292 'invalid',
294 'invalid',
293 'Http',
295 'Http',
294 'HTTP',
296 'HTTP',
295 ))
297 ))
296 def test_raises_on_invalid_protocol(self, protocol):
298 def test_raises_on_invalid_protocol(self, protocol):
297 expected_extras = {
299 expected_extras = {
298 'extra1': 'value1',
300 'extra1': 'value1',
299 'hooks_protocol': protocol.lower()
301 'hooks_protocol': protocol.lower()
300 }
302 }
301 with pytest.raises(Exception):
303 with pytest.raises(Exception):
302 callback, extras = hooks_daemon.prepare_callback_daemon(
304 callback, extras = hooks_daemon.prepare_callback_daemon(
303 expected_extras.copy(),
305 expected_extras.copy(),
304 protocol=protocol,
306 protocol=protocol, host='127.0.0.1',
305 use_direct_calls=False)
307 use_direct_calls=False)
306
308
307
309
308 class MockRequest(object):
310 class MockRequest(object):
309 def __init__(self, request):
311 def __init__(self, request):
310 self.request = request
312 self.request = request
311 self.input_stream = StringIO(b'{}'.format(self.request))
313 self.input_stream = StringIO(b'{}'.format(self.request))
312 self.output_stream = StringIO()
314 self.output_stream = StringIO()
313
315
314 def makefile(self, mode, *args, **kwargs):
316 def makefile(self, mode, *args, **kwargs):
315 return self.output_stream if mode == 'wb' else self.input_stream
317 return self.output_stream if mode == 'wb' else self.input_stream
316
318
317
319
318 class MockServer(object):
320 class MockServer(object):
319 def __init__(self, Handler, request):
321 def __init__(self, Handler, request):
320 ip_port = ('0.0.0.0', 8888)
322 ip_port = ('0.0.0.0', 8888)
321 self.request = MockRequest(request)
323 self.request = MockRequest(request)
322 self.handler = Handler(self.request, ip_port, self)
324 self.handler = Handler(self.request, ip_port, self)
323
325
324
326
325 @pytest.fixture
327 @pytest.fixture
326 def tcp_server():
328 def tcp_server():
327 server = mock.Mock()
329 server = mock.Mock()
328 server.server_address = ('127.0.0.1', 8881)
330 server.server_address = ('127.0.0.1', 8881)
329 return server
331 return server
@@ -1,297 +1,298 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import platform
22 import platform
23 import socket
23 import socket
24
24
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib.pyramid_utils import get_app_config
27 from rhodecode.lib.pyramid_utils import get_app_config
28 from rhodecode.tests.fixture import TestINI
28 from rhodecode.tests.fixture import TestINI
29 from rhodecode.tests.server_utils import RcVCSServer
29 from rhodecode.tests.server_utils import RcVCSServer
30
30
31
31
32 def _parse_json(value):
32 def _parse_json(value):
33 return json.loads(value) if value else None
33 return json.loads(value) if value else None
34
34
35
35
36 def pytest_addoption(parser):
36 def pytest_addoption(parser):
37 parser.addoption(
37 parser.addoption(
38 '--test-loglevel', dest='test_loglevel',
38 '--test-loglevel', dest='test_loglevel',
39 help="Set default Logging level for tests, warn (default), info, debug")
39 help="Set default Logging level for tests, warn (default), info, debug")
40 group = parser.getgroup('pylons')
40 group = parser.getgroup('pylons')
41 group.addoption(
41 group.addoption(
42 '--with-pylons', dest='pyramid_config',
42 '--with-pylons', dest='pyramid_config',
43 help="Set up a Pylons environment with the specified config file.")
43 help="Set up a Pylons environment with the specified config file.")
44 group.addoption(
44 group.addoption(
45 '--ini-config-override', action='store', type=_parse_json,
45 '--ini-config-override', action='store', type=_parse_json,
46 default=None, dest='pyramid_config_override', help=(
46 default=None, dest='pyramid_config_override', help=(
47 "Overrides the .ini file settings. Should be specified in JSON"
47 "Overrides the .ini file settings. Should be specified in JSON"
48 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
48 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
49 )
49 )
50 )
50 )
51 parser.addini(
51 parser.addini(
52 'pyramid_config',
52 'pyramid_config',
53 "Set up a Pyramid environment with the specified config file.")
53 "Set up a Pyramid environment with the specified config file.")
54
54
55 vcsgroup = parser.getgroup('vcs')
55 vcsgroup = parser.getgroup('vcs')
56 vcsgroup.addoption(
56 vcsgroup.addoption(
57 '--without-vcsserver', dest='with_vcsserver', action='store_false',
57 '--without-vcsserver', dest='with_vcsserver', action='store_false',
58 help="Do not start the VCSServer in a background process.")
58 help="Do not start the VCSServer in a background process.")
59 vcsgroup.addoption(
59 vcsgroup.addoption(
60 '--with-vcsserver-http', dest='vcsserver_config_http',
60 '--with-vcsserver-http', dest='vcsserver_config_http',
61 help="Start the HTTP VCSServer with the specified config file.")
61 help="Start the HTTP VCSServer with the specified config file.")
62 vcsgroup.addoption(
62 vcsgroup.addoption(
63 '--vcsserver-protocol', dest='vcsserver_protocol',
63 '--vcsserver-protocol', dest='vcsserver_protocol',
64 help="Start the VCSServer with HTTP protocol support.")
64 help="Start the VCSServer with HTTP protocol support.")
65 vcsgroup.addoption(
65 vcsgroup.addoption(
66 '--vcsserver-config-override', action='store', type=_parse_json,
66 '--vcsserver-config-override', action='store', type=_parse_json,
67 default=None, dest='vcsserver_config_override', help=(
67 default=None, dest='vcsserver_config_override', help=(
68 "Overrides the .ini file settings for the VCSServer. "
68 "Overrides the .ini file settings for the VCSServer. "
69 "Should be specified in JSON "
69 "Should be specified in JSON "
70 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
70 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
71 )
71 )
72 )
72 )
73 vcsgroup.addoption(
73 vcsgroup.addoption(
74 '--vcsserver-port', action='store', type=int,
74 '--vcsserver-port', action='store', type=int,
75 default=None, help=(
75 default=None, help=(
76 "Allows to set the port of the vcsserver. Useful when testing "
76 "Allows to set the port of the vcsserver. Useful when testing "
77 "against an already running server and random ports cause "
77 "against an already running server and random ports cause "
78 "trouble."))
78 "trouble."))
79 parser.addini(
79 parser.addini(
80 'vcsserver_config_http',
80 'vcsserver_config_http',
81 "Start the HTTP VCSServer with the specified config file.")
81 "Start the HTTP VCSServer with the specified config file.")
82 parser.addini(
82 parser.addini(
83 'vcsserver_protocol',
83 'vcsserver_protocol',
84 "Start the VCSServer with HTTP protocol support.")
84 "Start the VCSServer with HTTP protocol support.")
85
85
86
86
87 @pytest.fixture(scope='session')
87 @pytest.fixture(scope='session')
88 def vcsserver(request, vcsserver_port, vcsserver_factory):
88 def vcsserver(request, vcsserver_port, vcsserver_factory):
89 """
89 """
90 Session scope VCSServer.
90 Session scope VCSServer.
91
91
92 Tests wich need the VCSServer have to rely on this fixture in order
92 Tests wich need the VCSServer have to rely on this fixture in order
93 to ensure it will be running.
93 to ensure it will be running.
94
94
95 For specific needs, the fixture vcsserver_factory can be used. It allows to
95 For specific needs, the fixture vcsserver_factory can be used. It allows to
96 adjust the configuration file for the test run.
96 adjust the configuration file for the test run.
97
97
98 Command line args:
98 Command line args:
99
99
100 --without-vcsserver: Allows to switch this fixture off. You have to
100 --without-vcsserver: Allows to switch this fixture off. You have to
101 manually start the server.
101 manually start the server.
102
102
103 --vcsserver-port: Will expect the VCSServer to listen on this port.
103 --vcsserver-port: Will expect the VCSServer to listen on this port.
104 """
104 """
105
105
106 if not request.config.getoption('with_vcsserver'):
106 if not request.config.getoption('with_vcsserver'):
107 return None
107 return None
108
108
109 return vcsserver_factory(
109 return vcsserver_factory(
110 request, vcsserver_port=vcsserver_port)
110 request, vcsserver_port=vcsserver_port)
111
111
112
112
113 @pytest.fixture(scope='session')
113 @pytest.fixture(scope='session')
114 def vcsserver_factory(tmpdir_factory):
114 def vcsserver_factory(tmpdir_factory):
115 """
115 """
116 Use this if you need a running vcsserver with a special configuration.
116 Use this if you need a running vcsserver with a special configuration.
117 """
117 """
118
118
119 def factory(request, overrides=(), vcsserver_port=None,
119 def factory(request, overrides=(), vcsserver_port=None,
120 log_file=None):
120 log_file=None):
121
121
122 if vcsserver_port is None:
122 if vcsserver_port is None:
123 vcsserver_port = get_available_port()
123 vcsserver_port = get_available_port()
124
124
125 overrides = list(overrides)
125 overrides = list(overrides)
126 overrides.append({'server:main': {'port': vcsserver_port}})
126 overrides.append({'server:main': {'port': vcsserver_port}})
127
127
128 if is_cygwin():
128 if is_cygwin():
129 platform_override = {'DEFAULT': {
129 platform_override = {'DEFAULT': {
130 'beaker.cache.repo_object.type': 'nocache'}}
130 'beaker.cache.repo_object.type': 'nocache'}}
131 overrides.append(platform_override)
131 overrides.append(platform_override)
132
132
133 option_name = 'vcsserver_config_http'
133 option_name = 'vcsserver_config_http'
134 override_option_name = 'vcsserver_config_override'
134 override_option_name = 'vcsserver_config_override'
135 config_file = get_config(
135 config_file = get_config(
136 request.config, option_name=option_name,
136 request.config, option_name=option_name,
137 override_option_name=override_option_name, overrides=overrides,
137 override_option_name=override_option_name, overrides=overrides,
138 basetemp=tmpdir_factory.getbasetemp().strpath,
138 basetemp=tmpdir_factory.getbasetemp().strpath,
139 prefix='test_vcs_')
139 prefix='test_vcs_')
140
140
141 server = RcVCSServer(config_file, log_file)
141 server = RcVCSServer(config_file, log_file)
142 server.start()
142 server.start()
143
143
144 @request.addfinalizer
144 @request.addfinalizer
145 def cleanup():
145 def cleanup():
146 server.shutdown()
146 server.shutdown()
147
147
148 server.wait_until_ready()
148 server.wait_until_ready()
149 return server
149 return server
150
150
151 return factory
151 return factory
152
152
153
153
154 def is_cygwin():
154 def is_cygwin():
155 return 'cygwin' in platform.system().lower()
155 return 'cygwin' in platform.system().lower()
156
156
157
157
158 def _use_log_level(config):
158 def _use_log_level(config):
159 level = config.getoption('test_loglevel') or 'warn'
159 level = config.getoption('test_loglevel') or 'warn'
160 return level.upper()
160 return level.upper()
161
161
162
162
163 @pytest.fixture(scope='session')
163 @pytest.fixture(scope='session')
164 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
164 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
165 option_name = 'pyramid_config'
165 option_name = 'pyramid_config'
166 log_level = _use_log_level(request.config)
166 log_level = _use_log_level(request.config)
167
167
168 overrides = [
168 overrides = [
169 {'server:main': {'port': rcserver_port}},
169 {'server:main': {'port': rcserver_port}},
170 {'app:main': {
170 {'app:main': {
171 'vcs.server': 'localhost:%s' % vcsserver_port,
171 'vcs.server': 'localhost:%s' % vcsserver_port,
172 # johbo: We will always start the VCSServer on our own based on the
172 # johbo: We will always start the VCSServer on our own based on the
173 # fixtures of the test cases. For the test run it must always be
173 # fixtures of the test cases. For the test run it must always be
174 # off in the INI file.
174 # off in the INI file.
175 'vcs.start_server': 'false',
175 'vcs.start_server': 'false',
176
176
177 'vcs.server.protocol': 'http',
177 'vcs.server.protocol': 'http',
178 'vcs.scm_app_implementation': 'http',
178 'vcs.scm_app_implementation': 'http',
179 'vcs.hooks.protocol': 'http',
179 'vcs.hooks.protocol': 'http',
180 'vcs.hooks.host': '127.0.0.1',
180 }},
181 }},
181
182
182 {'handler_console': {
183 {'handler_console': {
183 'class ': 'StreamHandler',
184 'class ': 'StreamHandler',
184 'args ': '(sys.stderr,)',
185 'args ': '(sys.stderr,)',
185 'level': log_level,
186 'level': log_level,
186 }},
187 }},
187
188
188 ]
189 ]
189
190
190 filename = get_config(
191 filename = get_config(
191 request.config, option_name=option_name,
192 request.config, option_name=option_name,
192 override_option_name='{}_override'.format(option_name),
193 override_option_name='{}_override'.format(option_name),
193 overrides=overrides,
194 overrides=overrides,
194 basetemp=tmpdir_factory.getbasetemp().strpath,
195 basetemp=tmpdir_factory.getbasetemp().strpath,
195 prefix='test_rce_')
196 prefix='test_rce_')
196 return filename
197 return filename
197
198
198
199
199 @pytest.fixture(scope='session')
200 @pytest.fixture(scope='session')
200 def ini_settings(ini_config):
201 def ini_settings(ini_config):
201 ini_path = ini_config
202 ini_path = ini_config
202 return get_app_config(ini_path)
203 return get_app_config(ini_path)
203
204
204
205
205 def get_available_port():
206 def get_available_port():
206 family = socket.AF_INET
207 family = socket.AF_INET
207 socktype = socket.SOCK_STREAM
208 socktype = socket.SOCK_STREAM
208 host = '127.0.0.1'
209 host = '127.0.0.1'
209
210
210 mysocket = socket.socket(family, socktype)
211 mysocket = socket.socket(family, socktype)
211 mysocket.bind((host, 0))
212 mysocket.bind((host, 0))
212 port = mysocket.getsockname()[1]
213 port = mysocket.getsockname()[1]
213 mysocket.close()
214 mysocket.close()
214 del mysocket
215 del mysocket
215 return port
216 return port
216
217
217
218
218 @pytest.fixture(scope='session')
219 @pytest.fixture(scope='session')
219 def rcserver_port(request):
220 def rcserver_port(request):
220 port = get_available_port()
221 port = get_available_port()
221 print('Using rcserver port {}'.format(port))
222 print('Using rcserver port {}'.format(port))
222 return port
223 return port
223
224
224
225
225 @pytest.fixture(scope='session')
226 @pytest.fixture(scope='session')
226 def vcsserver_port(request):
227 def vcsserver_port(request):
227 port = request.config.getoption('--vcsserver-port')
228 port = request.config.getoption('--vcsserver-port')
228 if port is None:
229 if port is None:
229 port = get_available_port()
230 port = get_available_port()
230 print('Using vcsserver port {}'.format(port))
231 print('Using vcsserver port {}'.format(port))
231 return port
232 return port
232
233
233
234
234 @pytest.fixture(scope='session')
235 @pytest.fixture(scope='session')
235 def available_port_factory():
236 def available_port_factory():
236 """
237 """
237 Returns a callable which returns free port numbers.
238 Returns a callable which returns free port numbers.
238 """
239 """
239 return get_available_port
240 return get_available_port
240
241
241
242
242 @pytest.fixture
243 @pytest.fixture
243 def available_port(available_port_factory):
244 def available_port(available_port_factory):
244 """
245 """
245 Gives you one free port for the current test.
246 Gives you one free port for the current test.
246
247
247 Uses "available_port_factory" to retrieve the port.
248 Uses "available_port_factory" to retrieve the port.
248 """
249 """
249 return available_port_factory()
250 return available_port_factory()
250
251
251
252
252 @pytest.fixture(scope='session')
253 @pytest.fixture(scope='session')
253 def testini_factory(tmpdir_factory, ini_config):
254 def testini_factory(tmpdir_factory, ini_config):
254 """
255 """
255 Factory to create an INI file based on TestINI.
256 Factory to create an INI file based on TestINI.
256
257
257 It will make sure to place the INI file in the correct directory.
258 It will make sure to place the INI file in the correct directory.
258 """
259 """
259 basetemp = tmpdir_factory.getbasetemp().strpath
260 basetemp = tmpdir_factory.getbasetemp().strpath
260 return TestIniFactory(basetemp, ini_config)
261 return TestIniFactory(basetemp, ini_config)
261
262
262
263
263 class TestIniFactory(object):
264 class TestIniFactory(object):
264
265
265 def __init__(self, basetemp, template_ini):
266 def __init__(self, basetemp, template_ini):
266 self._basetemp = basetemp
267 self._basetemp = basetemp
267 self._template_ini = template_ini
268 self._template_ini = template_ini
268
269
269 def __call__(self, ini_params, new_file_prefix='test'):
270 def __call__(self, ini_params, new_file_prefix='test'):
270 ini_file = TestINI(
271 ini_file = TestINI(
271 self._template_ini, ini_params=ini_params,
272 self._template_ini, ini_params=ini_params,
272 new_file_prefix=new_file_prefix, dir=self._basetemp)
273 new_file_prefix=new_file_prefix, dir=self._basetemp)
273 result = ini_file.create()
274 result = ini_file.create()
274 return result
275 return result
275
276
276
277
277 def get_config(
278 def get_config(
278 config, option_name, override_option_name, overrides=None,
279 config, option_name, override_option_name, overrides=None,
279 basetemp=None, prefix='test'):
280 basetemp=None, prefix='test'):
280 """
281 """
281 Find a configuration file and apply overrides for the given `prefix`.
282 Find a configuration file and apply overrides for the given `prefix`.
282 """
283 """
283 config_file = (
284 config_file = (
284 config.getoption(option_name) or config.getini(option_name))
285 config.getoption(option_name) or config.getini(option_name))
285 if not config_file:
286 if not config_file:
286 pytest.exit(
287 pytest.exit(
287 "Configuration error, could not extract {}.".format(option_name))
288 "Configuration error, could not extract {}.".format(option_name))
288
289
289 overrides = overrides or []
290 overrides = overrides or []
290 config_override = config.getoption(override_option_name)
291 config_override = config.getoption(override_option_name)
291 if config_override:
292 if config_override:
292 overrides.append(config_override)
293 overrides.append(config_override)
293 temp_ini_file = TestINI(
294 temp_ini_file = TestINI(
294 config_file, ini_params=overrides, new_file_prefix=prefix,
295 config_file, ini_params=overrides, new_file_prefix=prefix,
295 dir=basetemp)
296 dir=basetemp)
296
297
297 return temp_ini_file.create()
298 return temp_ini_file.create()
@@ -1,686 +1,687 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of application errors, send an error email from this address
28 ## in case of application errors, send an error email from this address
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be send in case of server crash
31 ## additional error message to be send in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 0.0.0.0
46 host = 0.0.0.0
47 port = 5000
47 port = 5000
48
48
49 ##########################
49 ##########################
50 ## GUNICORN WSGI SERVER ##
50 ## GUNICORN WSGI SERVER ##
51 ##########################
51 ##########################
52 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
52 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
53
53
54 use = egg:gunicorn#main
54 use = egg:gunicorn#main
55 ## Sets the number of process workers. You must set `instance_id = *`
55 ## Sets the number of process workers. You must set `instance_id = *`
56 ## when this option is set to more than one worker, recommended
56 ## when this option is set to more than one worker, recommended
57 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
57 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
58 ## The `instance_id = *` must be set in the [app:main] section below
58 ## The `instance_id = *` must be set in the [app:main] section below
59 #workers = 2
59 #workers = 2
60 ## number of threads for each of the worker, must be set to 1 for gevent
60 ## number of threads for each of the worker, must be set to 1 for gevent
61 ## generally recommended to be 1
61 ## generally recommended to be 1
62 #threads = 1
62 #threads = 1
63 ## process name
63 ## process name
64 #proc_name = rhodecode
64 #proc_name = rhodecode
65 ## type of worker class, one of sync, gevent
65 ## type of worker class, one of sync, gevent
66 ## for bigger setups, using a worker class other than sync is recommended
66 ## for bigger setups, using a worker class other than sync is recommended
67 #worker_class = sync
67 #worker_class = sync
68 ## The maximum number of simultaneous clients. Valid only for Gevent
68 ## The maximum number of simultaneous clients. Valid only for Gevent
69 #worker_connections = 10
69 #worker_connections = 10
70 ## max number of requests that worker will handle before being gracefully
70 ## max number of requests that worker will handle before being gracefully
71 ## restarted, could prevent memory leaks
71 ## restarted, could prevent memory leaks
72 #max_requests = 1000
72 #max_requests = 1000
73 #max_requests_jitter = 30
73 #max_requests_jitter = 30
74 ## amount of time a worker can spend with handling a request before it
74 ## amount of time a worker can spend with handling a request before it
75 ## gets killed and restarted. Set to 6hrs
75 ## gets killed and restarted. Set to 6hrs
76 #timeout = 21600
76 #timeout = 21600
77
77
78 ## prefix middleware for RhodeCode.
78 ## prefix middleware for RhodeCode.
79 ## recommended when using proxy setup.
79 ## recommended when using proxy setup.
80 ## allows to set RhodeCode under a prefix in server.
80 ## allows to set RhodeCode under a prefix in server.
81 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
81 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
82 ## And set your prefix like: `prefix = /custom_prefix`
82 ## And set your prefix like: `prefix = /custom_prefix`
83 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
83 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
84 ## to make your cookies only work on prefix url
84 ## to make your cookies only work on prefix url
85 [filter:proxy-prefix]
85 [filter:proxy-prefix]
86 use = egg:PasteDeploy#prefix
86 use = egg:PasteDeploy#prefix
87 prefix = /
87 prefix = /
88
88
89 [app:main]
89 [app:main]
90 is_test = True
90 is_test = True
91 use = egg:rhodecode-enterprise-ce
91 use = egg:rhodecode-enterprise-ce
92
92
93 ## enable proxy prefix middleware, defined above
93 ## enable proxy prefix middleware, defined above
94 #filter-with = proxy-prefix
94 #filter-with = proxy-prefix
95
95
96
96
97 ## RHODECODE PLUGINS ##
97 ## RHODECODE PLUGINS ##
98 rhodecode.includes = rhodecode.api
98 rhodecode.includes = rhodecode.api
99
99
100 # api prefix url
100 # api prefix url
101 rhodecode.api.url = /_admin/api
101 rhodecode.api.url = /_admin/api
102
102
103
103
104 ## END RHODECODE PLUGINS ##
104 ## END RHODECODE PLUGINS ##
105
105
106 ## encryption key used to encrypt social plugin tokens,
106 ## encryption key used to encrypt social plugin tokens,
107 ## remote_urls with credentials etc, if not set it defaults to
107 ## remote_urls with credentials etc, if not set it defaults to
108 ## `beaker.session.secret`
108 ## `beaker.session.secret`
109 #rhodecode.encrypted_values.secret =
109 #rhodecode.encrypted_values.secret =
110
110
111 ## decryption strict mode (enabled by default). It controls if decryption raises
111 ## decryption strict mode (enabled by default). It controls if decryption raises
112 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
112 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
113 #rhodecode.encrypted_values.strict = false
113 #rhodecode.encrypted_values.strict = false
114
114
115 ## return gzipped responses from Rhodecode (static files/application)
115 ## return gzipped responses from Rhodecode (static files/application)
116 gzip_responses = false
116 gzip_responses = false
117
117
118 ## autogenerate javascript routes file on startup
118 ## autogenerate javascript routes file on startup
119 generate_js_files = false
119 generate_js_files = false
120
120
121 ## Optional Languages
121 ## Optional Languages
122 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
122 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
123 lang = en
123 lang = en
124
124
125 ## perform a full repository scan on each server start, this should be
125 ## perform a full repository scan on each server start, this should be
126 ## set to false after first startup, to allow faster server restarts.
126 ## set to false after first startup, to allow faster server restarts.
127 startup.import_repos = true
127 startup.import_repos = true
128
128
129 ## Uncomment and set this path to use archive download cache.
129 ## Uncomment and set this path to use archive download cache.
130 ## Once enabled, generated archives will be cached at this location
130 ## Once enabled, generated archives will be cached at this location
131 ## and served from the cache during subsequent requests for the same archive of
131 ## and served from the cache during subsequent requests for the same archive of
132 ## the repository.
132 ## the repository.
133 #archive_cache_dir = /tmp/tarballcache
133 #archive_cache_dir = /tmp/tarballcache
134
134
135 ## URL at which the application is running. This is used for bootstrapping
135 ## URL at which the application is running. This is used for bootstrapping
136 ## requests in context when no web request is available. Used in ishell, or
136 ## requests in context when no web request is available. Used in ishell, or
137 ## SSH calls. Set this for events to receive proper url for SSH calls.
137 ## SSH calls. Set this for events to receive proper url for SSH calls.
138 app.base_url = http://rhodecode.local
138 app.base_url = http://rhodecode.local
139
139
140 ## change this to unique ID for security
140 ## change this to unique ID for security
141 app_instance_uuid = rc-production
141 app_instance_uuid = rc-production
142
142
143 ## cut off limit for large diffs (size in bytes)
143 ## cut off limit for large diffs (size in bytes)
144 cut_off_limit_diff = 1024000
144 cut_off_limit_diff = 1024000
145 cut_off_limit_file = 256000
145 cut_off_limit_file = 256000
146
146
147 ## use cache version of scm repo everywhere
147 ## use cache version of scm repo everywhere
148 vcs_full_cache = false
148 vcs_full_cache = false
149
149
150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
151 ## Normally this is controlled by proper http flags sent from http server
151 ## Normally this is controlled by proper http flags sent from http server
152 force_https = false
152 force_https = false
153
153
154 ## use Strict-Transport-Security headers
154 ## use Strict-Transport-Security headers
155 use_htsts = false
155 use_htsts = false
156
156
157 ## git rev filter option, --all is the default filter, if you need to
157 ## git rev filter option, --all is the default filter, if you need to
158 ## hide all refs in changelog switch this to --branches --tags
158 ## hide all refs in changelog switch this to --branches --tags
159 git_rev_filter = --all
159 git_rev_filter = --all
160
160
161 # Set to true if your repos are exposed using the dumb protocol
161 # Set to true if your repos are exposed using the dumb protocol
162 git_update_server_info = false
162 git_update_server_info = false
163
163
164 ## RSS/ATOM feed options
164 ## RSS/ATOM feed options
165 rss_cut_off_limit = 256000
165 rss_cut_off_limit = 256000
166 rss_items_per_page = 10
166 rss_items_per_page = 10
167 rss_include_diff = false
167 rss_include_diff = false
168
168
169 ## gist URL alias, used to create nicer urls for gist. This should be an
169 ## gist URL alias, used to create nicer urls for gist. This should be an
170 ## url that does rewrites to _admin/gists/{gistid}.
170 ## url that does rewrites to _admin/gists/{gistid}.
171 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
171 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
172 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
172 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
173 gist_alias_url =
173 gist_alias_url =
174
174
175 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
175 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
176 ## used for access.
176 ## used for access.
177 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
177 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
178 ## came from the logged-in user who owns this authentication token.
178 ## came from the logged-in user who owns this authentication token.
179 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
179 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
180 ## authentication token. Such view would be only accessible when used together
180 ## authentication token. Such view would be only accessible when used together
181 ## with this authentication token
181 ## with this authentication token
182 ##
182 ##
183 ## list of all views can be found under `/_admin/permissions/auth_token_access`
183 ## list of all views can be found under `/_admin/permissions/auth_token_access`
184 ## The list should be "," separated and on a single line.
184 ## The list should be "," separated and on a single line.
185 ##
185 ##
186 ## Most common views to enable:
186 ## Most common views to enable:
187 # RepoCommitsView:repo_commit_download
187 # RepoCommitsView:repo_commit_download
188 # RepoCommitsView:repo_commit_patch
188 # RepoCommitsView:repo_commit_patch
189 # RepoCommitsView:repo_commit_raw
189 # RepoCommitsView:repo_commit_raw
190 # RepoCommitsView:repo_commit_raw@TOKEN
190 # RepoCommitsView:repo_commit_raw@TOKEN
191 # RepoFilesView:repo_files_diff
191 # RepoFilesView:repo_files_diff
192 # RepoFilesView:repo_archivefile
192 # RepoFilesView:repo_archivefile
193 # RepoFilesView:repo_file_raw
193 # RepoFilesView:repo_file_raw
194 # GistView:*
194 # GistView:*
195 api_access_controllers_whitelist =
195 api_access_controllers_whitelist =
196
196
197 ## default encoding used to convert from and to unicode
197 ## default encoding used to convert from and to unicode
198 ## can be also a comma separated list of encoding in case of mixed encodings
198 ## can be also a comma separated list of encoding in case of mixed encodings
199 default_encoding = UTF-8
199 default_encoding = UTF-8
200
200
201 ## instance-id prefix
201 ## instance-id prefix
202 ## a prefix key for this instance used for cache invalidation when running
202 ## a prefix key for this instance used for cache invalidation when running
203 ## multiple instances of rhodecode, make sure it's globally unique for
203 ## multiple instances of rhodecode, make sure it's globally unique for
204 ## all running rhodecode instances. Leave empty if you don't use it
204 ## all running rhodecode instances. Leave empty if you don't use it
205 instance_id =
205 instance_id =
206
206
207 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
207 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
208 ## of an authentication plugin also if it is disabled by it's settings.
208 ## of an authentication plugin also if it is disabled by it's settings.
209 ## This could be useful if you are unable to log in to the system due to broken
209 ## This could be useful if you are unable to log in to the system due to broken
210 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
210 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
211 ## module to log in again and fix the settings.
211 ## module to log in again and fix the settings.
212 ##
212 ##
213 ## Available builtin plugin IDs (hash is part of the ID):
213 ## Available builtin plugin IDs (hash is part of the ID):
214 ## egg:rhodecode-enterprise-ce#rhodecode
214 ## egg:rhodecode-enterprise-ce#rhodecode
215 ## egg:rhodecode-enterprise-ce#pam
215 ## egg:rhodecode-enterprise-ce#pam
216 ## egg:rhodecode-enterprise-ce#ldap
216 ## egg:rhodecode-enterprise-ce#ldap
217 ## egg:rhodecode-enterprise-ce#jasig_cas
217 ## egg:rhodecode-enterprise-ce#jasig_cas
218 ## egg:rhodecode-enterprise-ce#headers
218 ## egg:rhodecode-enterprise-ce#headers
219 ## egg:rhodecode-enterprise-ce#crowd
219 ## egg:rhodecode-enterprise-ce#crowd
220 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
220 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
221
221
222 ## alternative return HTTP header for failed authentication. Default HTTP
222 ## alternative return HTTP header for failed authentication. Default HTTP
223 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
223 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
224 ## handling that causing a series of failed authentication calls.
224 ## handling that causing a series of failed authentication calls.
225 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
225 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
226 ## This will be served instead of the default 401 on failed authentication
226 ## This will be served instead of the default 401 on failed authentication
227 auth_ret_code =
227 auth_ret_code =
228
228
229 ## use special detection method when serving auth_ret_code, instead of serving
229 ## use special detection method when serving auth_ret_code, instead of serving
230 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
230 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
231 ## and then serve auth_ret_code to clients
231 ## and then serve auth_ret_code to clients
232 auth_ret_code_detection = false
232 auth_ret_code_detection = false
233
233
234 ## locking return code. When repository is locked return this HTTP code. 2XX
234 ## locking return code. When repository is locked return this HTTP code. 2XX
235 ## codes don't break the transactions while 4XX codes do
235 ## codes don't break the transactions while 4XX codes do
236 lock_ret_code = 423
236 lock_ret_code = 423
237
237
238 ## allows to change the repository location in settings page
238 ## allows to change the repository location in settings page
239 allow_repo_location_change = true
239 allow_repo_location_change = true
240
240
241 ## allows to setup custom hooks in settings page
241 ## allows to setup custom hooks in settings page
242 allow_custom_hooks_settings = true
242 allow_custom_hooks_settings = true
243
243
244 ## generated license token, goto license page in RhodeCode settings to obtain
244 ## generated license token, goto license page in RhodeCode settings to obtain
245 ## new token
245 ## new token
246 license_token = abra-cada-bra1-rce3
246 license_token = abra-cada-bra1-rce3
247
247
248 ## supervisor connection uri, for managing supervisor and logs.
248 ## supervisor connection uri, for managing supervisor and logs.
249 supervisor.uri =
249 supervisor.uri =
250 ## supervisord group name/id we only want this RC instance to handle
250 ## supervisord group name/id we only want this RC instance to handle
251 supervisor.group_id = dev
251 supervisor.group_id = dev
252
252
253 ## Display extended labs settings
253 ## Display extended labs settings
254 labs_settings_active = true
254 labs_settings_active = true
255
255
256 ####################################
256 ####################################
257 ### CELERY CONFIG ####
257 ### CELERY CONFIG ####
258 ####################################
258 ####################################
259 use_celery = false
259 use_celery = false
260 broker.host = localhost
260 broker.host = localhost
261 broker.vhost = rabbitmqhost
261 broker.vhost = rabbitmqhost
262 broker.port = 5672
262 broker.port = 5672
263 broker.user = rabbitmq
263 broker.user = rabbitmq
264 broker.password = qweqwe
264 broker.password = qweqwe
265
265
266 celery.imports = rhodecode.lib.celerylib.tasks
266 celery.imports = rhodecode.lib.celerylib.tasks
267
267
268 celery.result.backend = amqp
268 celery.result.backend = amqp
269 celery.result.dburi = amqp://
269 celery.result.dburi = amqp://
270 celery.result.serialier = json
270 celery.result.serialier = json
271
271
272 #celery.send.task.error.emails = true
272 #celery.send.task.error.emails = true
273 #celery.amqp.task.result.expires = 18000
273 #celery.amqp.task.result.expires = 18000
274
274
275 celeryd.concurrency = 2
275 celeryd.concurrency = 2
276 #celeryd.log.file = celeryd.log
276 #celeryd.log.file = celeryd.log
277 celeryd.log.level = debug
277 celeryd.log.level = debug
278 celeryd.max.tasks.per.child = 1
278 celeryd.max.tasks.per.child = 1
279
279
280 ## tasks will never be sent to the queue, but executed locally instead.
280 ## tasks will never be sent to the queue, but executed locally instead.
281 celery.always.eager = false
281 celery.always.eager = false
282
282
283 ####################################
283 ####################################
284 ### BEAKER CACHE ####
284 ### BEAKER CACHE ####
285 ####################################
285 ####################################
286 # default cache dir for templates. Putting this into a ramdisk
286 # default cache dir for templates. Putting this into a ramdisk
287 ## can boost performance, eg. %(here)s/data_ramdisk
287 ## can boost performance, eg. %(here)s/data_ramdisk
288 cache_dir = %(here)s/data
288 cache_dir = %(here)s/data
289
289
290 ## locking and default file storage for Beaker. Putting this into a ramdisk
290 ## locking and default file storage for Beaker. Putting this into a ramdisk
291 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
291 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
292 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
292 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
293 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
293 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
294
294
295 beaker.cache.regions = short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
295 beaker.cache.regions = short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
296
296
297 beaker.cache.short_term.type = file
297 beaker.cache.short_term.type = file
298 beaker.cache.short_term.expire = 0
298 beaker.cache.short_term.expire = 0
299 beaker.cache.short_term.key_length = 256
299 beaker.cache.short_term.key_length = 256
300
300
301 beaker.cache.long_term.type = memory
301 beaker.cache.long_term.type = memory
302 beaker.cache.long_term.expire = 36000
302 beaker.cache.long_term.expire = 36000
303 beaker.cache.long_term.key_length = 256
303 beaker.cache.long_term.key_length = 256
304
304
305 beaker.cache.sql_cache_short.type = memory
305 beaker.cache.sql_cache_short.type = memory
306 beaker.cache.sql_cache_short.expire = 1
306 beaker.cache.sql_cache_short.expire = 1
307 beaker.cache.sql_cache_short.key_length = 256
307 beaker.cache.sql_cache_short.key_length = 256
308
308
309 ## default is memory cache, configure only if required
309 ## default is memory cache, configure only if required
310 ## using multi-node or multi-worker setup
310 ## using multi-node or multi-worker setup
311 #beaker.cache.auth_plugins.type = memory
311 #beaker.cache.auth_plugins.type = memory
312 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
312 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
313 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
313 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
314 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
314 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
315 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
315 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
316 #beaker.cache.auth_plugins.sa.pool_size = 10
316 #beaker.cache.auth_plugins.sa.pool_size = 10
317 #beaker.cache.auth_plugins.sa.max_overflow = 0
317 #beaker.cache.auth_plugins.sa.max_overflow = 0
318
318
319 beaker.cache.repo_cache_long.type = memorylru_base
319 beaker.cache.repo_cache_long.type = memorylru_base
320 beaker.cache.repo_cache_long.max_items = 4096
320 beaker.cache.repo_cache_long.max_items = 4096
321 beaker.cache.repo_cache_long.expire = 2592000
321 beaker.cache.repo_cache_long.expire = 2592000
322
322
323 ## default is memorylru_base cache, configure only if required
323 ## default is memorylru_base cache, configure only if required
324 ## using multi-node or multi-worker setup
324 ## using multi-node or multi-worker setup
325 #beaker.cache.repo_cache_long.type = ext:memcached
325 #beaker.cache.repo_cache_long.type = ext:memcached
326 #beaker.cache.repo_cache_long.url = localhost:11211
326 #beaker.cache.repo_cache_long.url = localhost:11211
327 #beaker.cache.repo_cache_long.expire = 1209600
327 #beaker.cache.repo_cache_long.expire = 1209600
328 #beaker.cache.repo_cache_long.key_length = 256
328 #beaker.cache.repo_cache_long.key_length = 256
329
329
330 ####################################
330 ####################################
331 ### BEAKER SESSION ####
331 ### BEAKER SESSION ####
332 ####################################
332 ####################################
333
333
334 ## .session.type is type of storage options for the session, current allowed
334 ## .session.type is type of storage options for the session, current allowed
335 ## types are file, ext:memcached, ext:database, and memory (default).
335 ## types are file, ext:memcached, ext:database, and memory (default).
336 beaker.session.type = file
336 beaker.session.type = file
337 beaker.session.data_dir = %(here)s/rc/data/sessions/data
337 beaker.session.data_dir = %(here)s/rc/data/sessions/data
338
338
339 ## db based session, fast, and allows easy management over logged in users
339 ## db based session, fast, and allows easy management over logged in users
340 #beaker.session.type = ext:database
340 #beaker.session.type = ext:database
341 #beaker.session.table_name = db_session
341 #beaker.session.table_name = db_session
342 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
342 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
343 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
343 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
344 #beaker.session.sa.pool_recycle = 3600
344 #beaker.session.sa.pool_recycle = 3600
345 #beaker.session.sa.echo = false
345 #beaker.session.sa.echo = false
346
346
347 beaker.session.key = rhodecode
347 beaker.session.key = rhodecode
348 beaker.session.secret = test-rc-uytcxaz
348 beaker.session.secret = test-rc-uytcxaz
349 beaker.session.lock_dir = %(here)s/rc/data/sessions/lock
349 beaker.session.lock_dir = %(here)s/rc/data/sessions/lock
350
350
351 ## Secure encrypted cookie. Requires AES and AES python libraries
351 ## Secure encrypted cookie. Requires AES and AES python libraries
352 ## you must disable beaker.session.secret to use this
352 ## you must disable beaker.session.secret to use this
353 #beaker.session.encrypt_key = key_for_encryption
353 #beaker.session.encrypt_key = key_for_encryption
354 #beaker.session.validate_key = validation_key
354 #beaker.session.validate_key = validation_key
355
355
356 ## sets session as invalid (also logging out the user) if it has not been
356 ## sets session as invalid (also logging out the user) if it has not been
357 ## accessed for given amount of time in seconds
357 ## accessed for given amount of time in seconds
358 beaker.session.timeout = 2592000
358 beaker.session.timeout = 2592000
359 beaker.session.httponly = true
359 beaker.session.httponly = true
360 ## Path to use for the cookie. Set to prefix if you use prefix middleware
360 ## Path to use for the cookie. Set to prefix if you use prefix middleware
361 #beaker.session.cookie_path = /custom_prefix
361 #beaker.session.cookie_path = /custom_prefix
362
362
363 ## uncomment for https secure cookie
363 ## uncomment for https secure cookie
364 beaker.session.secure = false
364 beaker.session.secure = false
365
365
366 ## auto save the session to not to use .save()
366 ## auto save the session to not to use .save()
367 beaker.session.auto = false
367 beaker.session.auto = false
368
368
369 ## default cookie expiration time in seconds, set to `true` to set expire
369 ## default cookie expiration time in seconds, set to `true` to set expire
370 ## at browser close
370 ## at browser close
371 #beaker.session.cookie_expires = 3600
371 #beaker.session.cookie_expires = 3600
372
372
373 ###################################
373 ###################################
374 ## SEARCH INDEXING CONFIGURATION ##
374 ## SEARCH INDEXING CONFIGURATION ##
375 ###################################
375 ###################################
376 ## Full text search indexer is available in rhodecode-tools under
376 ## Full text search indexer is available in rhodecode-tools under
377 ## `rhodecode-tools index` command
377 ## `rhodecode-tools index` command
378
378
379 ## WHOOSH Backend, doesn't require additional services to run
379 ## WHOOSH Backend, doesn't require additional services to run
380 ## it works well with a few dozen repositories
380 ## it works well with a few dozen repositories
381 search.module = rhodecode.lib.index.whoosh
381 search.module = rhodecode.lib.index.whoosh
382 search.location = %(here)s/data/index
382 search.location = %(here)s/data/index
383
383
384 ########################################
384 ########################################
385 ### CHANNELSTREAM CONFIG ####
385 ### CHANNELSTREAM CONFIG ####
386 ########################################
386 ########################################
387 ## channelstream enables persistent connections and live notification
387 ## channelstream enables persistent connections and live notification
388 ## in the system. It's also used by the chat system
388 ## in the system. It's also used by the chat system
389
389
390 channelstream.enabled = false
390 channelstream.enabled = false
391
391
392 ## server address for channelstream server on the backend
392 ## server address for channelstream server on the backend
393 channelstream.server = 127.0.0.1:9800
393 channelstream.server = 127.0.0.1:9800
394 ## location of the channelstream server from outside world
394 ## location of the channelstream server from outside world
395 ## use ws:// for http or wss:// for https. This address needs to be handled
395 ## use ws:// for http or wss:// for https. This address needs to be handled
396 ## by external HTTP server such as Nginx or Apache
396 ## by external HTTP server such as Nginx or Apache
397 ## see nginx/apache configuration examples in our docs
397 ## see nginx/apache configuration examples in our docs
398 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
398 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
399 channelstream.secret = secret
399 channelstream.secret = secret
400 channelstream.history.location = %(here)s/channelstream_history
400 channelstream.history.location = %(here)s/channelstream_history
401
401
402 ## Internal application path that Javascript uses to connect into.
402 ## Internal application path that Javascript uses to connect into.
403 ## If you use proxy-prefix the prefix should be added before /_channelstream
403 ## If you use proxy-prefix the prefix should be added before /_channelstream
404 channelstream.proxy_path = /_channelstream
404 channelstream.proxy_path = /_channelstream
405
405
406
406
407 ###################################
407 ###################################
408 ## APPENLIGHT CONFIG ##
408 ## APPENLIGHT CONFIG ##
409 ###################################
409 ###################################
410
410
411 ## Appenlight is tailored to work with RhodeCode, see
411 ## Appenlight is tailored to work with RhodeCode, see
412 ## http://appenlight.com for details how to obtain an account
412 ## http://appenlight.com for details how to obtain an account
413
413
414 ## appenlight integration enabled
414 ## appenlight integration enabled
415 appenlight = false
415 appenlight = false
416
416
417 appenlight.server_url = https://api.appenlight.com
417 appenlight.server_url = https://api.appenlight.com
418 appenlight.api_key = YOUR_API_KEY
418 appenlight.api_key = YOUR_API_KEY
419 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
419 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
420
420
421 # used for JS client
421 # used for JS client
422 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
422 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
423
423
424 ## TWEAK AMOUNT OF INFO SENT HERE
424 ## TWEAK AMOUNT OF INFO SENT HERE
425
425
426 ## enables 404 error logging (default False)
426 ## enables 404 error logging (default False)
427 appenlight.report_404 = false
427 appenlight.report_404 = false
428
428
429 ## time in seconds after request is considered being slow (default 1)
429 ## time in seconds after request is considered being slow (default 1)
430 appenlight.slow_request_time = 1
430 appenlight.slow_request_time = 1
431
431
432 ## record slow requests in application
432 ## record slow requests in application
433 ## (needs to be enabled for slow datastore recording and time tracking)
433 ## (needs to be enabled for slow datastore recording and time tracking)
434 appenlight.slow_requests = true
434 appenlight.slow_requests = true
435
435
436 ## enable hooking to application loggers
436 ## enable hooking to application loggers
437 appenlight.logging = true
437 appenlight.logging = true
438
438
439 ## minimum log level for log capture
439 ## minimum log level for log capture
440 appenlight.logging.level = WARNING
440 appenlight.logging.level = WARNING
441
441
442 ## send logs only from erroneous/slow requests
442 ## send logs only from erroneous/slow requests
443 ## (saves API quota for intensive logging)
443 ## (saves API quota for intensive logging)
444 appenlight.logging_on_error = false
444 appenlight.logging_on_error = false
445
445
446 ## list of additonal keywords that should be grabbed from environ object
446 ## list of additonal keywords that should be grabbed from environ object
447 ## can be string with comma separated list of words in lowercase
447 ## can be string with comma separated list of words in lowercase
448 ## (by default client will always send following info:
448 ## (by default client will always send following info:
449 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
449 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
450 ## start with HTTP* this list be extended with additional keywords here
450 ## start with HTTP* this list be extended with additional keywords here
451 appenlight.environ_keys_whitelist =
451 appenlight.environ_keys_whitelist =
452
452
453 ## list of keywords that should be blanked from request object
453 ## list of keywords that should be blanked from request object
454 ## can be string with comma separated list of words in lowercase
454 ## can be string with comma separated list of words in lowercase
455 ## (by default client will always blank keys that contain following words
455 ## (by default client will always blank keys that contain following words
456 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
456 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
457 ## this list be extended with additional keywords set here
457 ## this list be extended with additional keywords set here
458 appenlight.request_keys_blacklist =
458 appenlight.request_keys_blacklist =
459
459
460 ## list of namespaces that should be ignores when gathering log entries
460 ## list of namespaces that should be ignores when gathering log entries
461 ## can be string with comma separated list of namespaces
461 ## can be string with comma separated list of namespaces
462 ## (by default the client ignores own entries: appenlight_client.client)
462 ## (by default the client ignores own entries: appenlight_client.client)
463 appenlight.log_namespace_blacklist =
463 appenlight.log_namespace_blacklist =
464
464
465
465
466 ################################################################################
466 ################################################################################
467 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
467 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
468 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
468 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
469 ## execute malicious code after an exception is raised. ##
469 ## execute malicious code after an exception is raised. ##
470 ################################################################################
470 ################################################################################
471 set debug = false
471 set debug = false
472
472
473
473
474 ##############
474 ##############
475 ## STYLING ##
475 ## STYLING ##
476 ##############
476 ##############
477 debug_style = false
477 debug_style = false
478
478
479 ###########################################
479 ###########################################
480 ### MAIN RHODECODE DATABASE CONFIG ###
480 ### MAIN RHODECODE DATABASE CONFIG ###
481 ###########################################
481 ###########################################
482 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
482 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
483 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode_test
483 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode_test
484 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode_test
484 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode_test
485 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
485 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
486
486
487 # see sqlalchemy docs for other advanced settings
487 # see sqlalchemy docs for other advanced settings
488
488
489 ## print the sql statements to output
489 ## print the sql statements to output
490 sqlalchemy.db1.echo = false
490 sqlalchemy.db1.echo = false
491 ## recycle the connections after this amount of seconds
491 ## recycle the connections after this amount of seconds
492 sqlalchemy.db1.pool_recycle = 3600
492 sqlalchemy.db1.pool_recycle = 3600
493 sqlalchemy.db1.convert_unicode = true
493 sqlalchemy.db1.convert_unicode = true
494
494
495 ## the number of connections to keep open inside the connection pool.
495 ## the number of connections to keep open inside the connection pool.
496 ## 0 indicates no limit
496 ## 0 indicates no limit
497 #sqlalchemy.db1.pool_size = 5
497 #sqlalchemy.db1.pool_size = 5
498
498
499 ## the number of connections to allow in connection pool "overflow", that is
499 ## the number of connections to allow in connection pool "overflow", that is
500 ## connections that can be opened above and beyond the pool_size setting,
500 ## connections that can be opened above and beyond the pool_size setting,
501 ## which defaults to five.
501 ## which defaults to five.
502 #sqlalchemy.db1.max_overflow = 10
502 #sqlalchemy.db1.max_overflow = 10
503
503
504
504
505 ##################
505 ##################
506 ### VCS CONFIG ###
506 ### VCS CONFIG ###
507 ##################
507 ##################
508 vcs.server.enable = true
508 vcs.server.enable = true
509 vcs.server = localhost:9901
509 vcs.server = localhost:9901
510
510
511 ## Web server connectivity protocol, responsible for web based VCS operatations
511 ## Web server connectivity protocol, responsible for web based VCS operatations
512 ## Available protocols are:
512 ## Available protocols are:
513 ## `http` - use http-rpc backend (default)
513 ## `http` - use http-rpc backend (default)
514 vcs.server.protocol = http
514 vcs.server.protocol = http
515
515
516 ## Push/Pull operations protocol, available options are:
516 ## Push/Pull operations protocol, available options are:
517 ## `http` - use http-rpc backend (default)
517 ## `http` - use http-rpc backend (default)
518 ## `vcsserver.scm_app` - internal app (EE only)
518 ## `vcsserver.scm_app` - internal app (EE only)
519 vcs.scm_app_implementation = http
519 vcs.scm_app_implementation = http
520
520
521 ## Push/Pull operations hooks protocol, available options are:
521 ## Push/Pull operations hooks protocol, available options are:
522 ## `http` - use http-rpc backend (default)
522 ## `http` - use http-rpc backend (default)
523 vcs.hooks.protocol = http
523 vcs.hooks.protocol = http
524 vcs.hooks.host = 127.0.0.1
524
525
525 vcs.server.log_level = debug
526 vcs.server.log_level = debug
526 ## Start VCSServer with this instance as a subprocess, usefull for development
527 ## Start VCSServer with this instance as a subprocess, usefull for development
527 vcs.start_server = false
528 vcs.start_server = false
528
529
529 ## List of enabled VCS backends, available options are:
530 ## List of enabled VCS backends, available options are:
530 ## `hg` - mercurial
531 ## `hg` - mercurial
531 ## `git` - git
532 ## `git` - git
532 ## `svn` - subversion
533 ## `svn` - subversion
533 vcs.backends = hg, git, svn
534 vcs.backends = hg, git, svn
534
535
535 vcs.connection_timeout = 3600
536 vcs.connection_timeout = 3600
536 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
537 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
537 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
538 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
538 #vcs.svn.compatible_version = pre-1.8-compatible
539 #vcs.svn.compatible_version = pre-1.8-compatible
539
540
540
541
541 ############################################################
542 ############################################################
542 ### Subversion proxy support (mod_dav_svn) ###
543 ### Subversion proxy support (mod_dav_svn) ###
543 ### Maps RhodeCode repo groups into SVN paths for Apache ###
544 ### Maps RhodeCode repo groups into SVN paths for Apache ###
544 ############################################################
545 ############################################################
545 ## Enable or disable the config file generation.
546 ## Enable or disable the config file generation.
546 svn.proxy.generate_config = false
547 svn.proxy.generate_config = false
547 ## Generate config file with `SVNListParentPath` set to `On`.
548 ## Generate config file with `SVNListParentPath` set to `On`.
548 svn.proxy.list_parent_path = true
549 svn.proxy.list_parent_path = true
549 ## Set location and file name of generated config file.
550 ## Set location and file name of generated config file.
550 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
551 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
551 ## Used as a prefix to the `Location` block in the generated config file.
552 ## Used as a prefix to the `Location` block in the generated config file.
552 ## In most cases it should be set to `/`.
553 ## In most cases it should be set to `/`.
553 svn.proxy.location_root = /
554 svn.proxy.location_root = /
554 ## Command to reload the mod dav svn configuration on change.
555 ## Command to reload the mod dav svn configuration on change.
555 ## Example: `/etc/init.d/apache2 reload`
556 ## Example: `/etc/init.d/apache2 reload`
556 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
557 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
557 ## If the timeout expires before the reload command finishes, the command will
558 ## If the timeout expires before the reload command finishes, the command will
558 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
559 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
559 #svn.proxy.reload_timeout = 10
560 #svn.proxy.reload_timeout = 10
560
561
561 ############################################################
562 ############################################################
562 ### SSH Support Settings ###
563 ### SSH Support Settings ###
563 ############################################################
564 ############################################################
564
565
565 ## Defines if the authorized_keys file should be written on any change of
566 ## Defines if the authorized_keys file should be written on any change of
566 ## user ssh keys, setting this to false also disables posibility of adding
567 ## user ssh keys, setting this to false also disables posibility of adding
567 ## ssh keys for users from web interface.
568 ## ssh keys for users from web interface.
568 ssh.generate_authorized_keyfile = true
569 ssh.generate_authorized_keyfile = true
569
570
570 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
571 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
571 # ssh.authorized_keys_ssh_opts =
572 # ssh.authorized_keys_ssh_opts =
572
573
573 ## File to generate the authorized keys together with options
574 ## File to generate the authorized keys together with options
574 ## It is possible to have multiple key files specified in `sshd_config` e.g.
575 ## It is possible to have multiple key files specified in `sshd_config` e.g.
575 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
576 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
576 ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode
577 ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode
577
578
578 ## Command to execute the SSH wrapper. The binary is available in the
579 ## Command to execute the SSH wrapper. The binary is available in the
579 ## rhodecode installation directory.
580 ## rhodecode installation directory.
580 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
581 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
581 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
582 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
582
583
583 ## Allow shell when executing the ssh-wrapper command
584 ## Allow shell when executing the ssh-wrapper command
584 ssh.wrapper_cmd_allow_shell = false
585 ssh.wrapper_cmd_allow_shell = false
585
586
586 ## Enables logging, and detailed output send back to the client. Usefull for
587 ## Enables logging, and detailed output send back to the client. Usefull for
587 ## debugging, shouldn't be used in production.
588 ## debugging, shouldn't be used in production.
588 ssh.enable_debug_logging = false
589 ssh.enable_debug_logging = false
589
590
590 ## Paths to binary executrables, by default they are the names, but we can
591 ## Paths to binary executrables, by default they are the names, but we can
591 ## override them if we want to use a custom one
592 ## override them if we want to use a custom one
592 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
593 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
593 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
594 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
594 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
595 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
595
596
596
597
597 ## Dummy marker to add new entries after.
598 ## Dummy marker to add new entries after.
598 ## Add any custom entries below. Please don't remove.
599 ## Add any custom entries below. Please don't remove.
599 custom.conf = 1
600 custom.conf = 1
600
601
601
602
602 ################################
603 ################################
603 ### LOGGING CONFIGURATION ####
604 ### LOGGING CONFIGURATION ####
604 ################################
605 ################################
605 [loggers]
606 [loggers]
606 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper
607 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper
607
608
608 [handlers]
609 [handlers]
609 keys = console, console_sql
610 keys = console, console_sql
610
611
611 [formatters]
612 [formatters]
612 keys = generic, color_formatter, color_formatter_sql
613 keys = generic, color_formatter, color_formatter_sql
613
614
614 #############
615 #############
615 ## LOGGERS ##
616 ## LOGGERS ##
616 #############
617 #############
617 [logger_root]
618 [logger_root]
618 level = NOTSET
619 level = NOTSET
619 handlers = console
620 handlers = console
620
621
621 [logger_routes]
622 [logger_routes]
622 level = DEBUG
623 level = DEBUG
623 handlers =
624 handlers =
624 qualname = routes.middleware
625 qualname = routes.middleware
625 ## "level = DEBUG" logs the route matched and routing variables.
626 ## "level = DEBUG" logs the route matched and routing variables.
626 propagate = 1
627 propagate = 1
627
628
628 [logger_beaker]
629 [logger_beaker]
629 level = DEBUG
630 level = DEBUG
630 handlers =
631 handlers =
631 qualname = beaker.container
632 qualname = beaker.container
632 propagate = 1
633 propagate = 1
633
634
634 [logger_rhodecode]
635 [logger_rhodecode]
635 level = DEBUG
636 level = DEBUG
636 handlers =
637 handlers =
637 qualname = rhodecode
638 qualname = rhodecode
638 propagate = 1
639 propagate = 1
639
640
640 [logger_sqlalchemy]
641 [logger_sqlalchemy]
641 level = ERROR
642 level = ERROR
642 handlers = console_sql
643 handlers = console_sql
643 qualname = sqlalchemy.engine
644 qualname = sqlalchemy.engine
644 propagate = 0
645 propagate = 0
645
646
646 [logger_ssh_wrapper]
647 [logger_ssh_wrapper]
647 level = DEBUG
648 level = DEBUG
648 handlers =
649 handlers =
649 qualname = ssh_wrapper
650 qualname = ssh_wrapper
650 propagate = 1
651 propagate = 1
651
652
652
653
653 ##############
654 ##############
654 ## HANDLERS ##
655 ## HANDLERS ##
655 ##############
656 ##############
656
657
657 [handler_console]
658 [handler_console]
658 class = StreamHandler
659 class = StreamHandler
659 args = (sys.stderr,)
660 args = (sys.stderr,)
660 level = DEBUG
661 level = DEBUG
661 formatter = generic
662 formatter = generic
662
663
663 [handler_console_sql]
664 [handler_console_sql]
664 class = StreamHandler
665 class = StreamHandler
665 args = (sys.stderr,)
666 args = (sys.stderr,)
666 level = WARN
667 level = WARN
667 formatter = generic
668 formatter = generic
668
669
669 ################
670 ################
670 ## FORMATTERS ##
671 ## FORMATTERS ##
671 ################
672 ################
672
673
673 [formatter_generic]
674 [formatter_generic]
674 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
675 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
675 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
676 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
676 datefmt = %Y-%m-%d %H:%M:%S
677 datefmt = %Y-%m-%d %H:%M:%S
677
678
678 [formatter_color_formatter]
679 [formatter_color_formatter]
679 class = rhodecode.lib.logging_formatter.ColorFormatter
680 class = rhodecode.lib.logging_formatter.ColorFormatter
680 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
681 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
681 datefmt = %Y-%m-%d %H:%M:%S
682 datefmt = %Y-%m-%d %H:%M:%S
682
683
683 [formatter_color_formatter_sql]
684 [formatter_color_formatter_sql]
684 class = rhodecode.lib.logging_formatter.ColorFormatterSql
685 class = rhodecode.lib.logging_formatter.ColorFormatterSql
685 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
686 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
686 datefmt = %Y-%m-%d %H:%M:%S
687 datefmt = %Y-%m-%d %H:%M:%S
General Comments 0
You need to be logged in to leave comments. Login now